From 93a2ada8e29aa95a6b2cbe6d8177a74800307328 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 17:36:49 -0700 Subject: [PATCH 001/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20split=20dev=20comp?= =?UTF-8?q?ose=20files?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...er-compose.yaml => dev-docker-compose.yaml | 0 prod-docker-compose.yaml | 31 +++++++++++++++++++ 2 files changed, 31 insertions(+) rename docker-compose.yaml => dev-docker-compose.yaml (100%) create mode 100644 prod-docker-compose.yaml diff --git a/docker-compose.yaml b/dev-docker-compose.yaml similarity index 100% rename from docker-compose.yaml rename to dev-docker-compose.yaml diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml new file mode 100644 index 00000000..e0afcb93 --- /dev/null +++ b/prod-docker-compose.yaml @@ -0,0 +1,31 @@ +version: '3' +services: + flask-api: + build: + context: . + dockerfile: Dockerfile + # image: fairhub-flask-api:local + entrypoint: + - flask + - run + - --host=0.0.0.0 + - --port=5000 + ports: + - 5000:5000 + environment: + FLASK_DEBUG: 0 + FLASK_APP: ./app.py + FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" +# database: +# image: postgres:15.3 +# environment: +# - POSTGRES_USER=POSTGRES_USER +# - POSTGRES_PASSWORD=POSTGRES_PASSWORD +# - POSTGRES_DB=POSTGRES_DB +# ports: +# - 5432:5432 +# restart: always +# volumes: +# - db-data:/var/lib/postgresql/data +# volumes: +# db-data: From 42122c327f33096114b878b86d586a8388c91a57 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 18:48:56 -0700 Subject: [PATCH 002/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .dockerignore | 12 ------------ Dockerfile | 4 +--- apis/__init__.py | 2 +- app.py | 4 +--- dev-docker-compose.yaml | 7 ++++--- 
prod-docker-compose.yaml | 6 ++++-- 6 files changed, 11 insertions(+), 24 deletions(-) diff --git a/.dockerignore b/.dockerignore index 5d1cc910..2b3ef872 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,21 +1,9 @@ -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* - -node_modules .DS_Store dist dist-ssr coverage *.local -/cypress/videos/ -/cypress/screenshots/ # Editor directories and files .vscode/* diff --git a/Dockerfile b/Dockerfile index 9c948dc2..95d034fc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,6 +14,4 @@ COPY poetry.lock pyproject.toml ./ RUN poetry config virtualenvs.create false RUN poetry install -COPY . . - -CMD ["python3", "-m" , "flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file +CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file diff --git a/apis/__init__.py b/apis/__init__.py index d24d0744..21408c47 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -9,7 +9,7 @@ api = Api( title="FAIRHUB", - description="The backend api system for the fairhub app", + description="The backend api system for the fairhub vue app", doc="/docs", ) diff --git a/app.py b/app.py index 67f8f452..2fc023a9 100644 --- a/app.py +++ b/app.py @@ -1,6 +1,4 @@ """Entry point for the application.""" -import os - from flask import Flask from flask_cors import CORS @@ -74,4 +72,4 @@ def create_schema(): app = create_app() - app.run(host="0.0.0.0", port=port) + app.run(host="0.0.0.0", port=port, debug=True) diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 34ee0b6f..d31e9dd3 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -10,12 +10,13 @@ services: - run - --host=0.0.0.0 - --port=5000 - - --debug ports: - 5000:5000 + volumes: + - ./:/app environment: - FLASK_DEBUG: 0 - FLASK_APP: ./app.py + FLASK_ENV: development + FLASK_DEBUG: 1 FAIRHUB_DATABASE_URL: 
"${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" # database: # image: postgres:15.3 diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml index e0afcb93..440c3a43 100644 --- a/prod-docker-compose.yaml +++ b/prod-docker-compose.yaml @@ -7,14 +7,16 @@ services: # image: fairhub-flask-api:local entrypoint: - flask - - run - --host=0.0.0.0 - --port=5000 + - run ports: - 5000:5000 + volumes: + - ./:/app environment: + FLASK_ENV: production FLASK_DEBUG: 0 - FLASK_APP: ./app.py FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" # database: # image: postgres:15.3 From 05f7c450fa5019a74c016bab281a459b4e24739a Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 18:52:49 -0700 Subject: [PATCH 003/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev-docker-compose.yaml | 3 ++- prod-docker-compose.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index d31e9dd3..afcfbf0b 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -7,9 +7,10 @@ services: # image: fairhub-flask-api:local entrypoint: - flask - - run - --host=0.0.0.0 - --port=5000 + - --debug + - run ports: - 5000:5000 volumes: diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml index 440c3a43..e20b26f5 100644 --- a/prod-docker-compose.yaml +++ b/prod-docker-compose.yaml @@ -7,9 +7,9 @@ services: # image: fairhub-flask-api:local entrypoint: - flask + - run - --host=0.0.0.0 - --port=5000 - - run ports: - 5000:5000 volumes: From b2fd43769ce75fd79c0739146408946c0a8ea7d4 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 18:54:50 -0700 Subject: [PATCH 004/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- prod-docker-compose.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml index e20b26f5..52ed7fa2 100644 --- a/prod-docker-compose.yaml +++ b/prod-docker-compose.yaml @@ -17,6 +17,7 @@ services: environment: FLASK_ENV: production FLASK_DEBUG: 0 + FLASK_APP: ./app.py FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" # database: # image: postgres:15.3 From 77642b95e395d2b47c325f2f137963e29bf30e3e Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 18:55:51 -0700 Subject: [PATCH 005/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- prod-docker-compose.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml index 52ed7fa2..cf1e4987 100644 --- a/prod-docker-compose.yaml +++ b/prod-docker-compose.yaml @@ -15,7 +15,6 @@ services: volumes: - ./:/app environment: - FLASK_ENV: production FLASK_DEBUG: 0 FLASK_APP: ./app.py FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" From 616f130a5235c668ae731d02aa29f9394c9dd661 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 18:58:11 -0700 Subject: [PATCH 006/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- dev-docker-compose.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app.py b/app.py index 2fc023a9..a4f402e6 100644 --- a/app.py +++ b/app.py @@ -72,4 +72,4 @@ def create_schema(): app = create_app() - app.run(host="0.0.0.0", port=port, debug=True) + app.run(host="0.0.0.0", port=port) diff --git 
a/dev-docker-compose.yaml b/dev-docker-compose.yaml index afcfbf0b..1eda0cc2 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -7,10 +7,10 @@ services: # image: fairhub-flask-api:local entrypoint: - flask - - --host=0.0.0.0 - - --port=5000 - --debug - run + - --port=5000 + - --host=0.0.0.0 ports: - 5000:5000 volumes: From f514e4a393bfa67c132a1a6c52aa06e52c015bfc Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 18:58:49 -0700 Subject: [PATCH 007/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Dockerfile b/Dockerfile index 95d034fc..e3620299 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,4 +14,6 @@ COPY poetry.lock pyproject.toml ./ RUN poetry config virtualenvs.create false RUN poetry install +COPY . . + CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file From ed2af81ebddf3d3b917541dc6351a3e75552c63a Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 19:00:28 -0700 Subject: [PATCH 008/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev-docker-compose.yaml | 2 -- prod-docker-compose.yaml | 2 -- 2 files changed, 4 deletions(-) diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 1eda0cc2..666be792 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -13,8 +13,6 @@ services: - --host=0.0.0.0 ports: - 5000:5000 - volumes: - - ./:/app environment: FLASK_ENV: development FLASK_DEBUG: 1 diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml index cf1e4987..e0afcb93 100644 --- a/prod-docker-compose.yaml +++ b/prod-docker-compose.yaml @@ -12,8 +12,6 @@ services: - --port=5000 ports: - 5000:5000 - volumes: - - 
./:/app environment: FLASK_DEBUG: 0 FLASK_APP: ./app.py From de0dee5a57f2e823f9707d8dfddd843cb7a6975d Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 2 Aug 2023 19:01:23 -0700 Subject: [PATCH 009/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20compose?= =?UTF-8?q?=20files=20for=20dev?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev-docker-compose.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 666be792..1eda0cc2 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -13,6 +13,8 @@ services: - --host=0.0.0.0 ports: - 5000:5000 + volumes: + - ./:/app environment: FLASK_ENV: development FLASK_DEBUG: 1 From 7f9b322df272959a33d10cc015d1307cc4aaa311 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 3 Aug 2023 12:03:44 -0700 Subject: [PATCH 010/505] wip: adding dataset endpoint --- apis/dataset.py | 25 +++++++++++------ apis/participant.py | 3 +++ apis/study.py | 5 ++-- model/invited_study_contributor.py | 43 ++++++++++++++++++++++++++++++ 4 files changed, 66 insertions(+), 10 deletions(-) create mode 100644 model/invited_study_contributor.py diff --git a/apis/dataset.py b/apis/dataset.py index 2f3dfc12..c1411536 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -79,9 +79,11 @@ def get(self, study_id): def post(self, study_id): data = request.json study = Study.query.get(study_id) - # &&study_id + # study_id # todo if study.participant id== different study Throw error # query based on part id and study id (prolly filter but need to find right syntax) + # data["participants"] = [(Participant.filter_by(i="study_id".first()),Participant.filter_by + # (i="participant_id".first()) )for i in data["participants"]] data["participants"] = [ Participant.query.get(i).first() for i in data["participants"] ] @@ -101,8 +103,6 @@ class UpdateDataset(Resource): @api.param("id", "Adding version") @api.marshal_with(dataset_version) def get(self, 
study_id, dataset_id, version_id): - # if int(study_id) not in dic: - # return "not found", 404 dataset_version = DatasetVersion.query.get(version_id) return jsonify(dataset_version.to_dict()) @@ -125,7 +125,7 @@ def delete(self, study_id, dataset_id, version_id): @api.route("/study//dataset//version") @api.response(201, "Success") @api.response(400, "Validation Error") -class PostDataset(Resource): +class PostDatasetVersion(Resource): def post(self, study_id: int, dataset_id: int): data = request.json data["participants"] = [Participant.query.get(i) for i in data["participants"]] @@ -136,7 +136,16 @@ def post(self, study_id: int, dataset_id: int): return jsonify(dataset_version.to_dict()) -# @dataset.route("/study//dataset/", methods=["POST"]) -# def update_dataset(study_id, dataset_id): -# pass -# +@api.route("/study//dataset/") +@api.response(201, "Success") +@api.response(400, "Validation Error") +class PostDataset(Resource): + def put(study_id, dataset_id): + data = request.json + data["participants"] = [Participant.query.get(i) for i in data["participants"]] + data_obj = Dataset.query.get(dataset_id) + dataset_ = Dataset.from_data(data_obj, data) + db.session.add(dataset_) + db.session.commit() + return jsonify(dataset_.to_dict()) + diff --git a/apis/participant.py b/apis/participant.py index 45ea5194..7c62ae36 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -39,8 +39,11 @@ def post(self, study_id: int): @api.route("/study//participants/") class UpdateParticipant(Resource): + @api.doc("participants") @api.response(200, "Success") @api.response(400, "Validation Error") + @api.param("id", "Adding participants") + @api.marshal_with(participants) def put(self, study_id, participant_id: int): update_participant = Participant.query.get(participant_id) update_participant.update(request.json) diff --git a/apis/study.py b/apis/study.py index 722eea06..04971ff2 100644 --- a/apis/study.py +++ b/apis/study.py @@ -42,7 +42,7 @@ class Studies(Resource): 
@api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The study identifier") - # @api.marshal_list_with(study) + @api.marshal_list_with(study) def get(self): studies = Study.query.all() return [s.to_dict() for s in studies] @@ -56,10 +56,11 @@ def post(self): @api.route("/study/") class StudyResource(Resource): - @api.doc("update study") + @api.doc("get study") @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The study identifier") + @api.marshal_list_with(study) # @api.marshal_with(study) def get(self, study_id: int): study1 = Study.query.get(study_id) diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py new file mode 100644 index 00000000..5ea71a90 --- /dev/null +++ b/model/invited_study_contributor.py @@ -0,0 +1,43 @@ +import uuid + +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY + +from .db import db + + +class InvitedStudyContributor(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "invited_study_contributor" + email_address = db.Column(ARRAY(String), nullable=False) + permission = db.Column(db.String, nullable=False) + date = db.Column(db.String, nullable=False) + last_name = db.Column(db.String, nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study") + def to_dict(self): + return { + "id": self.id, + "affiliations": self.affiliations, + "email": self.email, + "first_name": self.first_name, + "last_name": self.last_name, + "orcid": self.ORCID, + "roles": self.roles, + "status": self.status, + } + + @staticmethod + def from_data(data: dict): + version_contributor = InvitedStudyContributor() + version_contributor.affiliations = data["affiliations"] + version_contributor.email = data["email"] + version_contributor.first_name = data["first_name"] + version_contributor.last_name = data["last_name"] + version_contributor.orcid = 
data["orcid"] + version_contributor.roles = data["roles"] + version_contributor.status = data["status"] + return version_contributor From 329e7695c2c9fba1f6f46ba065e1af8dc93eaf47 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 3 Aug 2023 19:04:24 +0000 Subject: [PATCH 011/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dataset.py | 1 - model/invited_study_contributor.py | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/dataset.py b/apis/dataset.py index c1411536..c872f9bd 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -148,4 +148,3 @@ def put(study_id, dataset_id): db.session.add(dataset_) db.session.commit() return jsonify(dataset_.to_dict()) - diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 5ea71a90..c452ef46 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -18,6 +18,7 @@ def __init__(self): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study") + def to_dict(self): return { "id": self.id, From 7762a22d4d7de15b1308f8810a99da4fd3309893 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 3 Aug 2023 19:08:19 -0700 Subject: [PATCH 012/505] wip: removed marshalls --- apis/contributor.py | 2 +- apis/dataset.py | 30 ++++++++++++++++-------------- apis/participant.py | 4 ++-- apis/study.py | 5 +---- model/__init__.py | 2 ++ model/invited_study_contributor.py | 21 ++++++++------------- 6 files changed, 30 insertions(+), 34 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index e6255ab3..4c27a7d7 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -27,7 +27,7 @@ class AddParticipant(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The contributor identifier") - 
@api.marshal_with(contributors_model) + # @api.marshal_with(contributors_model) # @api.marshal_with(contributor) def get(self, study_id: int): contributors = User.query.all() diff --git a/apis/dataset.py b/apis/dataset.py index c1411536..a0f4f15f 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -4,18 +4,22 @@ from model import Dataset, DatasetVersion, Participant, Study, db api = Namespace("dataset", description="dataset operations", path="/") + + dataset = api.model( "Dataset", { "id": fields.String(required=True), + "last_modified": fields.String(required=True), + "last_published": fields.String(required=True), + "latest_version": fields.String(required=True), "name": fields.String(required=True), - "title": fields.String(required=True), - "description": fields.String(required=True), + "published_version": fields.String(required=True), }, ) contributors = api.model( - "DatasetVersion", + "Contributors", { "id": fields.String(required=True), "affiliations": fields.String(required=True), @@ -30,7 +34,7 @@ ) participants = api.model( - "DatasetVersion", + "Participants", { "id": fields.Boolean(required=True), "first_name": fields.String(required=True), @@ -42,7 +46,7 @@ dataset_version = api.model( - "Dataset", + "DatasetVersion", { "id": fields.String(required=True), "title": fields.String(required=True), @@ -53,8 +57,6 @@ "published": fields.Boolean(required=True), "doi": fields.String(required=True), "name": fields.String(required=True), - "contributors": fields.Nested(contributors, required=True), - "participants": fields.Nested(participants, required=True), }, ) @@ -63,9 +65,9 @@ class AddDataset(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("dataset") - @api.param("id", "Adding dataset") - @api.marshal_with(dataset) + @api.doc("add dataset", params={'id': 'An ID'}) + # @api.marshal_list_with(dataset) + # @api.expect(body=dataset) def get(self, study_id): study = Study.query.get(study_id) datasets = 
Dataset.query.filter_by(study=study) @@ -73,9 +75,8 @@ def get(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("dataset") - @api.param("id", "Adding dataset") - @api.marshal_with(dataset) + @api.doc("update dataset") + def post(self, study_id): data = request.json study = Study.query.get(study_id) @@ -101,7 +102,7 @@ class UpdateDataset(Resource): @api.response(400, "Validation Error") @api.doc("dataset version") @api.param("id", "Adding version") - @api.marshal_with(dataset_version) + # @api.marshal_with(dataset_version) def get(self, study_id, dataset_id, version_id): dataset_version = DatasetVersion.query.get(version_id) return jsonify(dataset_version.to_dict()) @@ -136,6 +137,7 @@ def post(self, study_id: int, dataset_id: int): return jsonify(dataset_version.to_dict()) +#TODO not finalized endpoint. have to set functionality @api.route("/study//dataset/") @api.response(201, "Success") @api.response(400, "Validation Error") diff --git a/apis/participant.py b/apis/participant.py index 7c62ae36..dbcf2a42 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -24,7 +24,7 @@ class AddParticipant(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "Adding participants") - @api.marshal_with(participants) + # @api.marshal_with(participants) def get(self, study_id: int): participants = Participant.query.all() return [p.to_dict() for p in participants] @@ -43,7 +43,7 @@ class UpdateParticipant(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "Adding participants") - @api.marshal_with(participants) + # @api.marshal_with(participants) def put(self, study_id, participant_id: int): update_participant = Participant.query.get(participant_id) update_participant.update(request.json) diff --git a/apis/study.py b/apis/study.py index 04971ff2..d5647c6d 100644 --- a/apis/study.py +++ b/apis/study.py @@ -42,7 +42,7 @@ class 
Studies(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The study identifier") - @api.marshal_list_with(study) + # @api.marshal_with(study) def get(self): studies = Study.query.all() return [s.to_dict() for s in studies] @@ -60,7 +60,6 @@ class StudyResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The study identifier") - @api.marshal_list_with(study) # @api.marshal_with(study) def get(self, study_id: int): study1 = Study.query.get(study_id) @@ -68,8 +67,6 @@ def get(self, study_id: int): def put(self, study_id: int): update_study = Study.query.get(study_id) - # if not addStudy.validate(): - # return 'error', 422 update_study.update(request.json) db.session.commit() return update_study.to_dict() diff --git a/model/__init__.py b/model/__init__.py index c755d665..421cc9e7 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -5,6 +5,7 @@ from .participant import Participant from .study import Study from .user import User +# from .invited_study_contributor import InvitedStudyContributor __all__ = [ "Study", @@ -14,4 +15,5 @@ "Participant", "db", "User", + # "InvitedStudyContributor" ] diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 5ea71a90..8a38bd40 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -11,23 +11,20 @@ def __init__(self): self.id = str(uuid.uuid4()) __tablename__ = "invited_study_contributor" - email_address = db.Column(ARRAY(String), nullable=False) + email_address = db.Column(String, nullable=False) permission = db.Column(db.String, nullable=False) - date = db.Column(db.String, nullable=False) - last_name = db.Column(db.String, nullable=False) + invited_on = db.Column(db.DateTime, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) study = 
db.relationship("Study") def to_dict(self): return { - "id": self.id, - "affiliations": self.affiliations, - "email": self.email, - "first_name": self.first_name, - "last_name": self.last_name, + "email_address": self.id, + "permission": self.affiliations, + "date": self.email, + "invited_on": self.first_name, "orcid": self.ORCID, - "roles": self.roles, - "status": self.status, + } @staticmethod @@ -38,6 +35,4 @@ def from_data(data: dict): version_contributor.first_name = data["first_name"] version_contributor.last_name = data["last_name"] version_contributor.orcid = data["orcid"] - version_contributor.roles = data["roles"] - version_contributor.status = data["status"] return version_contributor From 8b53eb5019a16b017e3daa5a7c449257774fe788 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 4 Aug 2023 02:09:59 +0000 Subject: [PATCH 013/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dataset.py | 5 ++--- model/__init__.py | 1 + model/invited_study_contributor.py | 1 - 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/apis/dataset.py b/apis/dataset.py index 245dbcbc..aa5bd747 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -65,7 +65,7 @@ class AddDataset(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("add dataset", params={'id': 'An ID'}) + @api.doc("add dataset", params={"id": "An ID"}) # @api.marshal_list_with(dataset) # @api.expect(body=dataset) def get(self, study_id): @@ -76,7 +76,6 @@ def get(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("update dataset") - def post(self, study_id): data = request.json study = Study.query.get(study_id) @@ -137,7 +136,7 @@ def post(self, study_id: int, dataset_id: int): return jsonify(dataset_version.to_dict()) -#TODO not finalized endpoint. 
have to set functionality +# TODO not finalized endpoint. have to set functionality @api.route("/study//dataset/") @api.response(201, "Success") @api.response(400, "Validation Error") diff --git a/model/__init__.py b/model/__init__.py index 421cc9e7..cabedef6 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -5,6 +5,7 @@ from .participant import Participant from .study import Study from .user import User + # from .invited_study_contributor import InvitedStudyContributor __all__ = [ diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 41d1cd65..699b6f6c 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -25,7 +25,6 @@ def to_dict(self): "date": self.email, "invited_on": self.first_name, "orcid": self.ORCID, - } @staticmethod From 491713893178dd04913a146b4e921df2001d1ef0 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Sat, 5 Aug 2023 21:15:55 -0700 Subject: [PATCH 014/505] Update README.md --- README.md | Bin 4866 -> 2472 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/README.md b/README.md index 926c129c290ba57335937ecdf845bc598925ded5..301c05baff8e3f3769be461c236e8f977feb8c8e 100644 GIT binary patch literal 2472 zcma)8+iv4F5Pc7je;6b$4H{8SyxR?exPY_Cwg%E7$OT2QC~R69TdXBgOHy(DZGGq` z^%purN^2#x(Z0wQHTOAlhJXM0*D=UiiG@_QXy!th!-K#Uq9cg`nPWi?`wCNrnRa?y{RRIm!eL|(7eR-BnlezcGHs$hLSoQ z3uBk9+>OarXt#3f^;+cNOk}2FA^7zf2Y!Cag_U=1{5?f;Z6Ax%BjpQlqk+ zLS3SCU7F#T^R!LomC+97NZAXvb|&DXMb?#2noO4PqO5P|7d8DexKr8LzObP3 zp*8?8o6Y9Z6^94(zmA1UDtIAH5c~`u9Mv?#c#zJOPCG;4E1BRh$UvKyv#cpK8wo!UdM4BP>yPFK8Vh15VB}tYP2&cLE;z^$jDQz4n+H zf&rm#kIP^**re8n7f2G)8NG4dcl$xWb)$&5xUp^FI`hw44(s5jn`$0#MDW_w*8Gl{ z7v5AQA&d}0hp*C1r&FbspH7LDqXfCpT$j!V(T&GHVx?Qa&N-fbj28m#n|Y~Ru`9|~ zoEHIDN9n!|BJ4=)dl|b!H_=ce-g5IAiR}_-NqSqgcn+%C)0y^rNQ)Zc4LFRDodxw7Ps6$(>Zi8E;g{M6CbLL8iZ%rGGxEl)W7jvXW?|lm zDLc^p&n&n~Ne%A@gTW;~AVK&vo;>{cd_BDxKaH=)kGIq3hr3a2oL^extxW8#2`5Bf 
z39URPpFKhKv!{mLsSLG4uEi5m_S)DL&~F*fb7<&JVGj>J|1^2p?rTF=jkw~%Qg4M_ zS}A`bXloPI-%UQ={``2m)mhT2qDu|$_wgioxE5L60*v`A@%0;sA`RgAHT3x26zLjBLTeSSM$ z@1A{jLDk9j-rn5I>@zdZ&ibFfpIc!~8(P=4tfNuaM)t|3cCGua4eU$%B7B}3l0R76 zCc3)Tw_DwhHE!ySiRN!AvPhrmNjrT0l*X>4yFqenr5)+XP@lfulpg_8j>}GsCcorq;9^gCrfJFp{>GG?7WfrBGhA^*#~XTKM*@#)Z8LnG)?b zHG^DBo)G=d(m|e0=l9_1b7<~KYhO>vWipCixBFQmUkSOFqFqR)u&Z!gOJ=Nyf|bt` z#gqpP>Bj@NS;*TsPH&9m>r@`L0xC(O)QUl)T=6Kq>t4i2xd+vFQDdMzm7lTp-byR0 zWc|d8*m#Z?0X1^IwSwtmais1*;IW7$KbEaFaa85&>w}ob7fo+cbfq4Zc zq;e~(5q_MFq^*kk%aDa9@;32D%4qzo<11$&SOv7;I4T6+?(}`Dx5({Z^y|^%7amD< zT+b#c|JGMMHmTU2ERhRX`)`jQ8&Tpjl)hNY)FG9abI^eu7}ZOhURLTSt#4VwI{ZEF zBkxo9)ITvDo5q}Ggvm~t785^_jzS6*T}e*y@E>ndZs3ekJ7SdBsrl4CI%FctBCS}Y=)P|=gMc3%g(??RTr zGpkk$qvN7>VDZ@#z3^@%RYOO5nxRM7oyRdUt@jD|I?zR>;!DiKb=mV>@c-ky)*@o* zJ)+{fcx&St!Pwcrx$mi1M~lf%=eD$lLOWAK6^NaKkiqL$p0rf6ohob@3e@RC|Dc?t&qaB|lQN?jL;CQO_(nqt?e#$a`GnTHY zuFuOEa9Gj3&JXXfkAy9?$3B20pDG`2>aio&#@zRADDE@9)6O<>;!S2h?I+$z*X3F+ z&%Oo2&@a?SYZa;L`|PMOo2d{^ft&|pb@&Ogp?jEPESzG;x2KuZ)w}08V@E7~Zn+y^ zKge13D%7W2CnvohdQfI~*YDL5cUI0lFbmZXdoucJ>bs|l{@&{|?>DZVxT-5@kE`&W za+&F>SpMHw#9X3JEbO)VAg;Z|2o7TxeXl;_#6H@Oc43FAvO2jnH_89LQg*g zo*O7;6Gaqs;t3a&`nsG?{6yCXawFw}pT3_Ke^9E6y;s-=Y2`FUKi*y-dal@dmrZ?* z!tUqya6*zNj`VWu!Cs-4_LJ6sUVF+O2s^iwPI2jcUDeFL7d_dU=p0WTo#{>Ry%csQ zI>jE_&jHW6zWz467yn=JyemUo8p8fqqa*3y--OMq3Q6@aoxcWbbpW@n*qd$!pJ=0sMPuka$Uf_eVG_`8= zJg64=4}cw>nk298Ws^>wb2NF!Q!2C6JZ!^Usa}6uXO1@-ZQCn-? 
Date: Tue, 8 Aug 2023 11:24:13 -0700 Subject: [PATCH 015/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20=20docker?= =?UTF-8?q?=20files?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Dockerfile | 8 ++++++- db-docker-compose.yml | 13 ++++++++++++ dev-docker-compose.yaml | 19 +++++++---------- requirements.txt | 47 ----------------------------------------- 4 files changed, 28 insertions(+), 59 deletions(-) create mode 100644 db-docker-compose.yml delete mode 100644 requirements.txt diff --git a/Dockerfile b/Dockerfile index e3620299..24060626 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,7 @@ FROM python:3.8-alpine +EXPOSE 5000 + WORKDIR /app ENV POETRY_VERSION=1.3.2 @@ -14,6 +16,10 @@ COPY poetry.lock pyproject.toml ./ RUN poetry config virtualenvs.create false RUN poetry install -COPY . . +COPY apis ./apis +COPY model ./model +COPY core ./core +COPY app.py . +COPY config.py . CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file diff --git a/db-docker-compose.yml b/db-docker-compose.yml new file mode 100644 index 00000000..67b5526c --- /dev/null +++ b/db-docker-compose.yml @@ -0,0 +1,13 @@ +version: '3' +services: + postgres: + image: postgres:9.6 + restart: always + environment: + POSTGRES_PASSWORD: postgres + POSTGRES_USER: postgres + POSTGRES_DB: postgres + ports: + - 5432:5432 + volumes: + - ./postgres-data:/var/lib/postgresql/data \ No newline at end of file diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 1eda0cc2..1e8ea83b 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -3,18 +3,15 @@ services: flask-api: build: context: . 
- dockerfile: Dockerfile - # image: fairhub-flask-api:local - entrypoint: - - flask - - --debug - - run - - --port=5000 - - --host=0.0.0.0 + dockerfile: Dockerfile ports: - - 5000:5000 - volumes: - - ./:/app + - "5000:5000" + # volumes: + # - ./apis:/app/apis + # - ./model:/app/model + # - ./core:/app/core + # - ./app.py:/app/ + # - ./config.py:/app/ environment: FLASK_ENV: development FLASK_DEBUG: 1 diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index ed3a3721..00000000 --- a/requirements.txt +++ /dev/null @@ -1,47 +0,0 @@ -aniso8601==9.0.1 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -art==6.0 ; python_full_version >= "3.8.16" and python_version < "4.0" -attrs==23.1.0 ; python_full_version >= "3.8.16" and python_version < "4.0" -blinker==1.6.2 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -certifi==2023.7.22 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -charset-normalizer==3.2.0 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -click==8.1.6 ; python_full_version >= "3.8.16" and python_version < "4.0" -colorama==0.4.6 ; python_full_version >= "3.8.16" and platform_system == "Windows" and python_version < "4.0" -decorator==5.1.1 ; python_full_version >= "3.8.16" and python_version < "4.0" -dicttoxml==1.7.16 ; python_full_version >= "3.8.16" and python_version < "4.0" -faker==18.13.0 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -flask-cors==4.0.0 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -flask-restx==1.1.0 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -flask-sqlalchemy==3.0.5 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -flask==2.3.2 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -greenlet==2.0.2 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" and platform_machine == "aarch64" or python_full_version >= "3.8.16" and 
python_full_version < "4.0.0" and platform_machine == "ppc64le" or python_full_version >= "3.8.16" and python_full_version < "4.0.0" and platform_machine == "x86_64" or python_full_version >= "3.8.16" and python_full_version < "4.0.0" and platform_machine == "amd64" or python_full_version >= "3.8.16" and python_full_version < "4.0.0" and platform_machine == "AMD64" or python_full_version >= "3.8.16" and python_full_version < "4.0.0" and platform_machine == "win32" or python_full_version >= "3.8.16" and python_full_version < "4.0.0" and platform_machine == "WIN32" -idna==3.4 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -importlib-metadata==6.8.0 ; python_full_version >= "3.8.16" and python_version < "3.10" -importlib-resources==6.0.0 ; python_full_version >= "3.8.16" and python_version < "3.9" -itsdangerous==2.1.2 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -jinja2==3.1.2 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -jsonschema-specifications==2023.7.1 ; python_full_version >= "3.8.16" and python_version < "4.0" -jsonschema==4.18.4 ; python_full_version >= "3.8.16" and python_version < "4.0" -markdown==3.3.7 ; python_full_version >= "3.8.16" and python_version < "4.0" -markupsafe==2.1.3 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -minilog==2.2 ; python_full_version >= "3.8.16" and python_version < "4.0" -pkgutil-resolve-name==1.3.10 ; python_full_version >= "3.8.16" and python_version < "3.9" -psycopg2==2.9.6 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -pyfairdatatools==0.1.3 ; python_full_version >= "3.8.16" and python_version < "4.0" -pyflakes==3.1.0 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -pymdown-extensions==10.1 ; python_full_version >= "3.8.16" and python_version < "4.0" -python-dateutil==2.8.2 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -python-dotenv==1.0.0 ; python_full_version >= "3.8.16" 
and python_full_version < "4.0.0" -pytz==2023.3 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -pyyaml==6.0.1 ; python_full_version >= "3.8.16" and python_version < "4.0" -referencing==0.30.0 ; python_full_version >= "3.8.16" and python_version < "4.0" -requests==2.31.0 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -rpds-py==0.9.2 ; python_full_version >= "3.8.16" and python_version < "4.0" -six==1.16.0 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -sqlalchemy==2.0.19 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -types-requests==2.31.0.2 ; python_full_version >= "3.8.16" and python_version < "4.0" -types-urllib3==1.26.25.14 ; python_full_version >= "3.8.16" and python_version < "4.0" -typing-extensions==4.7.1 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -urllib3==1.26.16 ; python_full_version >= "3.8.16" and python_version < "4.0" -validators==0.20.0 ; python_full_version >= "3.8.16" and python_version < "4.0" -werkzeug==2.3.6 ; python_full_version >= "3.8.16" and python_full_version < "4.0.0" -zipp==3.16.2 ; python_full_version >= "3.8.16" and python_version < "3.10" From 34e6155a5fada7cce16737ab5918619e6f36cb04 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 14 Aug 2023 12:32:46 -0700 Subject: [PATCH 016/505] fix: model classes changed --- model/__init__.py | 9 +++-- model/dataset.py | 45 +++++++++++++++--------- model/dataset_contributor.py | 56 ++++++++++++++++++++++++++++++ model/dataset_version.py | 46 +++++++++--------------- model/dataset_versions.py | 4 --- model/invited_study_contributor.py | 27 +++++++------- model/participant.py | 8 ++++- model/study.py | 38 +++++++------------- model/study_contributor.py | 35 ++++--------------- model/user.py | 37 +++++++++----------- model/version_contributor.py | 51 --------------------------- 11 files changed, 162 insertions(+), 194 deletions(-) create mode 100644 model/dataset_contributor.py delete mode 
100644 model/version_contributor.py diff --git a/model/__init__.py b/model/__init__.py index cabedef6..79041e61 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -6,7 +6,10 @@ from .study import Study from .user import User -# from .invited_study_contributor import InvitedStudyContributor +from .dataset_contributor import DatasetContributor + +from .invited_study_contributor import StudyInvitedContributor +from .study_contributor import StudyContributor __all__ = [ "Study", @@ -16,5 +19,7 @@ "Participant", "db", "User", - # "InvitedStudyContributor" + "DatasetContributor", + "InvitedStudyContributor", + "StudyContributor" ] diff --git a/model/dataset.py b/model/dataset.py index 7cb6663d..3f10ee3d 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -1,5 +1,8 @@ import uuid - +from datetime import datetime +from sqlalchemy.dialects.postgresql import ARRAY +from sqlalchemy import String +from datetime import datetime from sqlalchemy.sql.expression import true import model @@ -14,41 +17,49 @@ def __init__(self, study): __tablename__ = "dataset" id = db.Column(db.CHAR(36), primary_key=True) + # latest_version = db.Column(db.String, nullable=False) + updated_on = db.Column(db.DateTime, nullable=False) + created_at = db.Column(db.DateTime, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="dataset") - dataset_versions = db.relationship( - "DatasetVersion", back_populates="dataset", lazy="dynamic" - ) + + dataset_contributors = db.relationship("DatasetContributor", back_populates="dataset") + dataset_versions = db.relationship("DatasetVersion", back_populates="dataset", lazy="dynamic") def to_dict(self): last_published = self.last_published() last_modified = self.last_modified() - return ( - model.DatasetVersions( - last_published, - last_modified, - last_published.name if last_published else last_modified.name, - self.id, - ) - ).to_dict() + + return { + "id": self.id, + "updated_on": 
str(datetime.now()), + "created_at": str(datetime.now()), + "dataset_versions": [i.to_dict() for i in self.dataset_versions], + "latest_version": last_published.id if last_published else None + } def last_published(self): return ( self.dataset_versions.filter(model.DatasetVersion.published == true()) - .order_by(model.DatasetVersion.published.desc()) + .order_by(model.DatasetVersion.published_on.desc()) .first() ) def last_modified(self): return self.dataset_versions.order_by( - model.DatasetVersion.modified.desc() + model.DatasetVersion.updated_on.desc() ).first() @staticmethod def from_data(data: dict): + """Creates a new dataset from a dictionary""" dataset = Dataset() - # dataset.id = data["id"] - for i in data.values(): - print(i) + dataset.latest_version = data["latest_version"] + dataset.published_year = data["published_year"] + dataset.resource_type = data["resource_type"] + dataset.publisher = data["publisher"] + dataset.primary_language = data["primary_language"] + dataset.keywords = data["keywords"] + return dataset diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py new file mode 100644 index 00000000..ec075595 --- /dev/null +++ b/model/dataset_contributor.py @@ -0,0 +1,56 @@ +import uuid + +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY + +from .db import db + + +class DatasetContributor(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_contributor" + id = db.Column(db.CHAR(36), primary_key=True) + first_name = db.Column(db.String, nullable=False) + last_name = db.Column(db.String, nullable=False) + name_type = db.Column(db.String, nullable=False) + name_identifier = db.Column(db.String, nullable=False) + name_identifier_scheme = db.Column(db.String, nullable=False) + name_identifier_scheme_uri = db.Column(db.String, nullable=False) + creator = db.Column(db.Boolean, nullable=False) + contributor_type = db.Column(db.String, nullable=False) + + dataset_id = 
db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_contributors" + ) + + def to_dict(self): + return { + "id": self.id, + "first_name": self.first_name, + "last_name": self.last_name, + "name_type": self.name_type, + "name_identifier": self.name_identifier, + "name_identifier_scheme": self.name_identifier_scheme, + "name_identifier_scheme_uri": self.name_identifier_scheme_uri, + "creator": self.creator, + "contributor_type": self.contributor_type, + + } + + + @staticmethod + def from_data(data: dict): + dataset_contributor = DatasetContributor() + # dataset_contributor.id = data["id"] + dataset_contributor.first_name = data["first_name"] + dataset_contributor.last_name = data["last_name"] + dataset_contributor.name_type = data["name_type"] + dataset_contributor.name_identifier = data["name_identifier"] + dataset_contributor.name_identifier_scheme = data["name_identifier_scheme"] + dataset_contributor.name_identifier_scheme_uri = data["name_identifier_scheme_uri"] + dataset_contributor.creator = data["creator"] + dataset_contributor.contributor_type = data["contributor_type"] + return dataset_contributor diff --git a/model/dataset_version.py b/model/dataset_version.py index 037a2965..a2fbf21f 100644 --- a/model/dataset_version.py +++ b/model/dataset_version.py @@ -1,19 +1,9 @@ import uuid - +from datetime import datetime from model import Dataset from .db import db -version_contributors = db.Table( - "version_contributors", - db.Model.metadata, - db.Column( - "dataset_version_id", db.ForeignKey("dataset_version.id"), primary_key=True - ), - db.Column("user_id", db.ForeignKey("user.id"), primary_key=True), -) - - version_participants = db.Table( "version_participants", db.Model.metadata, @@ -31,32 +21,28 @@ def __init__(self, dataset): __tablename__ = "dataset_version" id = db.Column(db.CHAR(36), primary_key=True) + title = db.Column(db.String, nullable=False) - description = db.Column(db.String, 
nullable=False) - keywords = db.Column(db.String, nullable=False) - primary_language = db.Column(db.String, nullable=False) - modified = db.Column(db.DateTime, nullable=True) - published = db.Column(db.Boolean, nullable=False) + published = db.Column(db.BOOLEAN, nullable=False) + changelog = db.Column(db.String, nullable=False) + updated_on = db.Column(db.DateTime, nullable=False) doi = db.Column(db.String, nullable=False) - name = db.Column(db.String, nullable=False) + created_at = db.Column(db.DateTime, nullable=False) + published_on = db.Column(db.DateTime, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship("Dataset", back_populates="dataset_versions") - contributors = db.relationship("User", secondary=version_contributors) participants = db.relationship("Participant", secondary=version_participants) def to_dict(self): return { "id": self.id, "title": self.title, - "description": self.description, - "keywords": self.keywords, - "primary_language": self.primary_language, - "modified": str(self.modified), + "changelog": self.published, + "published_on": str(datetime.now()), + "created_at": str(datetime.now()), "published": self.published, - "contributors": [user.to_dict() for user in self.contributors], "doi": self.doi, - "name": self.name, "participants": [p.id for p in self.participants], } @@ -68,11 +54,11 @@ def from_data(dataset: Dataset, data: dict): def update(self, data): self.title = data["title"] - self.description = data["description"] - self.keywords = data["keywords"] - self.primary_language = data["primary_language"] - self.modified = data["modified"] self.published = data["published"] - self.participants[:] = data["participants"] self.doi = data["doi"] - self.name = data["name"] + self.created_at = data["created_at"] + self.published_on = data["published_on"] + self.participants[:] = data["participants"] + self.changelog = data["changelog"] + + diff --git a/model/dataset_versions.py 
b/model/dataset_versions.py index 4d519934..f2405398 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -6,14 +6,12 @@ def __init__( self, last_published: model.DatasetVersion, last_modified: model.DatasetVersion, - name: str, id: str, ): self.latest_version = last_modified.id self.published_version = last_published.id self.last_modified = last_modified.modified self.last_published = last_published.modified - self.name = name self.id = id def to_dict(self): @@ -22,7 +20,6 @@ def to_dict(self): "published_version": self.published_version, "last_modified": self.last_modified, "last_published": self.last_published, - "name": self.name, "id": self.id, } @@ -33,6 +30,5 @@ def from_data(data: dict): dataset_versions.latest_version = data["latest_version"] dataset_versions.last_modified = data["last_modified"] dataset_versions.last_published = data["last_published"] - dataset_versions.name = data["name"] dataset_versions.published_version = data["published_version"] return dataset_versions diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 699b6f6c..c302ef44 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -2,37 +2,34 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY - +from datetime import datetime from .db import db -class InvitedStudyContributor(db.Model): +class StudyInvitedContributor(db.Model): def __init__(self): self.id = str(uuid.uuid4()) __tablename__ = "invited_study_contributor" - email_address = db.Column(String, nullable=False) + email_address = db.Column(String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) invited_on = db.Column(db.DateTime, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) - study = db.relationship("Study") + study = db.relationship("Study", back_populates="invited_contributors") def to_dict(self): return { 
"email_address": self.id, - "permission": self.affiliations, - "date": self.email, - "invited_on": self.first_name, - "orcid": self.ORCID, + "permission": self.permission, + "invited_on": datetime.now(), } @staticmethod def from_data(data: dict): - version_contributor = InvitedStudyContributor() - version_contributor.affiliations = data["affiliations"] - version_contributor.email = data["email"] - version_contributor.first_name = data["first_name"] - version_contributor.last_name = data["last_name"] - version_contributor.orcid = data["orcid"] - return version_contributor + invited_contributor = StudyInvitedContributor() + invited_contributor.email_address = data["email_address"] + invited_contributor.permission = data["permission"] + invited_contributor.invited_on = data["invited_on"] + + return invited_contributor diff --git a/model/participant.py b/model/participant.py index 382b2816..208eada7 100644 --- a/model/participant.py +++ b/model/participant.py @@ -1,7 +1,7 @@ import uuid import model - +from datetime import datetime from .db import db @@ -16,6 +16,8 @@ def __init__(self, study): last_name = db.Column(db.String, nullable=False) address = db.Column(db.String, nullable=False) age = db.Column(db.String, nullable=False) + created_at = db.Column(db.DateTime, nullable=False) + updated_on = db.Column(db.DateTime, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="participants") @@ -32,6 +34,8 @@ def to_dict(self): "last_name": self.last_name, "address": self.address, "age": self.age, + "created_at" : str(datetime.now()), + "published_on": str(datetime.now()), } @staticmethod @@ -46,3 +50,5 @@ def update(self, data): self.last_name = data["last_name"] self.address = data["address"] self.age = data["age"] + self.created_at = data["created_at"] + self.updated_on = data["updated_on"] diff --git a/model/study.py b/model/study.py index 89910d0b..f0a19908 100644 --- a/model/study.py +++ 
b/model/study.py @@ -1,4 +1,3 @@ -# from .study_contributor import StudyContributor import uuid from datetime import datetime @@ -9,47 +8,36 @@ from .db import db -study_contributors = db.Table( - "study_contributors", - db.Model.metadata, - db.Column("study_id", db.ForeignKey("study.id"), primary_key=True), - db.Column("user_id", db.ForeignKey("user.id"), primary_key=True), -) - class Study(db.Model): """A study is a collection of datasets and participants""" def __init__(self): self.id = str(uuid.uuid4()) + # self.created_at = datetime.now() __tablename__ = "study" id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) - description = db.Column(db.String, nullable=False) image = db.Column(db.String, nullable=False) - size = db.Column(db.String, nullable=False) - keywords = db.Column(ARRAY(String), nullable=False) - last_updated = db.Column(db.DateTime, nullable=False) - dataset = db.relationship("Dataset", back_populates="study") + created_at = db.Column(db.DateTime, nullable=False) + updated_on = db.Column(db.DateTime, nullable=False) - owner = db.relationship("User") - owner_id = db.Column(db.CHAR(36), db.ForeignKey("user.id")) - contributors = db.relationship("User", secondary=study_contributors) + dataset = db.relationship("Dataset", back_populates="study") + study_contributors = db.relationship("StudyContributor", back_populates="study") participants = db.relationship("Participant", back_populates="study") + invited_contributors = db.relationship("StudyInvitedContributor", back_populates="study") def to_dict(self): """Converts the study to a dictionary""" return { "id": self.id, "title": self.title, - "description": self.description, "image": self.image, - "keywords": self.keywords, - "last_updated": str(self.last_updated), - "size": self.size, - "owner": self.owner.to_dict(), + "created_at": str(self.created_at), + "updated_on": str(self.updated_on), + # "study_contributors": self.study_contributors.to_dict(), } 
@staticmethod @@ -63,12 +51,10 @@ def from_data(data: dict): def update(self, data): """Updates the study from a dictionary""" self.title = data["title"] - self.description = data["description"] self.image = data["image"] - self.size = data["size"] - self.keywords = data["keywords"] - self.last_updated = datetime.now() - self.owner = model.User.from_data(data["owner"]) + # self.user = model.User.from_data(data["user"]) + self.created_at = data["created_at"] + self.updated_on = data["updated_on"] def validate(self): """Validates the study""" diff --git a/model/study_contributor.py b/model/study_contributor.py index 96627551..d98d0a08 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,5 +1,4 @@ import uuid - from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -11,44 +10,24 @@ def __init__(self): self.id = str(uuid.uuid4()) __tablename__ = "study_contributor" - id = db.Column(db.CHAR(36), primary_key=True) - affiliations = db.Column(ARRAY(String), nullable=False) - email = db.Column(db.String, nullable=False) - first_name = db.Column(db.String, nullable=False) - last_name = db.Column(db.String, nullable=False) - orcid = db.Column(db.String, nullable=False) - roles = db.Column(ARRAY(String), nullable=False) permission = db.Column(db.String, nullable=False) - status = db.Column(db.String, nullable=False) + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), primary_key=True) + user = db.relationship("User", back_populates="study_contributors") study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) - study = db.relationship("Study", back_populates="contributors") + study = db.relationship("Study", back_populates="study_contributors") def to_dict(self): return { - "id": self.id, - "affiliations": self.affiliations, - "email": self.email, - "first_name": self.first_name, - "last_name": self.last_name, - "orcid": self.orcid, - "roles": self.roles, "permission": self.permission, - "status": self.status, + 
"user_id": self.user_id, + "study_id": self.study_id, } @staticmethod def from_data(data: dict): study_contributor = StudyContributor() - # for i in data.values(): - # print(i) - # study_contributor.id = data["id"] - study_contributor.affiliations = data["affiliations"] - study_contributor.email = data["email"] - study_contributor.first_name = data["first_name"] - study_contributor.last_name = data["last_name"] - study_contributor.orcid = data["orcid"] - study_contributor.roles = data["roles"] study_contributor.permission = data["permission"] - study_contributor.status = data["status"] + study_contributor.user_id = data["user_id"] + study_contributor.study_id = data["study_id"] return study_contributor diff --git a/model/user.py b/model/user.py index a6cd57ec..773d290f 100644 --- a/model/user.py +++ b/model/user.py @@ -1,8 +1,5 @@ import uuid - -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY - +from datetime import datetime from .db import db @@ -12,37 +9,37 @@ def __init__(self): __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) - affiliations = db.Column(ARRAY(String), nullable=False) - email = db.Column(db.String, nullable=False) + email_address = db.Column(db.String, nullable=False) + username = db.Column(db.String, nullable=False) first_name = db.Column(db.String, nullable=False) last_name = db.Column(db.String, nullable=False) orcid = db.Column(db.String, nullable=False) - roles = db.Column(ARRAY(String), nullable=False) - permission = db.Column(db.String, nullable=False) - status = db.Column(db.String, nullable=False) - + hash = db.Column(db.String, nullable=False) + created_at = db.Column(db.DateTime, nullable=False) + institution = db.Column(db.String, nullable=False) + study_contributors = db.relationship("StudyContributor", back_populates="user") def to_dict(self): return { "id": self.id, - "affiliations": self.affiliations, - "email": self.email, + "email_address": self.email_address, + "username": 
self.username, "first_name": self.first_name, "last_name": self.last_name, "orcid": self.orcid, - "roles": self.roles, - "permission": self.permission, - "status": self.status, + "hash": self.hash, + "created_at": str(datetime.now()), + "institution": self.institution, } @staticmethod def from_data(data: dict): user = User() - user.affiliations = data["affiliations"] - user.email = data["email"] + user.email_address = data["email_address"] + user.username = data["username"] user.first_name = data["first_name"] user.last_name = data["last_name"] user.orcid = data["orcid"] - user.roles = data["roles"] - user.permission = data["permission"] - user.status = data["status"] + user.hash = data["hash"] + user.created_at = data["created_at"] + user.institution = data["institution"] return user diff --git a/model/version_contributor.py b/model/version_contributor.py deleted file mode 100644 index af08303d..00000000 --- a/model/version_contributor.py +++ /dev/null @@ -1,51 +0,0 @@ -import uuid - -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY - -from .db import db - - -class VersionContributor(db.Model): - def __init__(self): - self.id = str(uuid.uuid4()) - - __tablename__ = "version_contributor" - id = db.Column(db.CHAR(36), primary_key=True) - affiliations = db.Column(ARRAY(String), nullable=False) - email = db.Column(db.String, nullable=False) - first_name = db.Column(db.String, nullable=False) - last_name = db.Column(db.String, nullable=False) - orcid = db.Column(db.String, nullable=False) - roles = db.Column(ARRAY(String), nullable=False) - status = db.Column(db.String, nullable=False) - - dataset_version_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_version.id")) - dataset_version = db.relationship( - "DatasetVersion", back_populates="versionContributors" - ) - - def to_dict(self): - return { - "id": self.id, - "affiliations": self.affiliations, - "email": self.email, - "first_name": self.first_name, - "last_name": self.last_name, - 
"orcid": self.ORCID, - "roles": self.roles, - "status": self.status, - } - - @staticmethod - def from_data(data: dict): - version_contributor = VersionContributor() - # versionContributor.id = data["id"] - version_contributor.affiliations = data["affiliations"] - version_contributor.email = data["email"] - version_contributor.first_name = data["first_name"] - version_contributor.last_name = data["last_name"] - version_contributor.orcid = data["orcid"] - version_contributor.roles = data["roles"] - version_contributor.status = data["status"] - return version_contributor From aaf6676501f145a7219482f9914898ce692c350f Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 15 Aug 2023 17:51:21 -0700 Subject: [PATCH 017/505] chore: minor changes fixed in models --- model/__init__.py | 2 +- model/dataset.py | 5 +---- model/dataset_contributor.py | 3 --- model/dataset_version.py | 4 ++-- model/invited_study_contributor.py | 5 +---- model/study.py | 2 -- model/study_contributor.py | 2 -- 7 files changed, 5 insertions(+), 18 deletions(-) diff --git a/model/__init__.py b/model/__init__.py index 79041e61..b254806d 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -20,6 +20,6 @@ "db", "User", "DatasetContributor", - "InvitedStudyContributor", + "StudyInvitedContributor", "StudyContributor" ] diff --git a/model/dataset.py b/model/dataset.py index 3f10ee3d..22eb3f64 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -1,7 +1,5 @@ import uuid -from datetime import datetime -from sqlalchemy.dialects.postgresql import ARRAY -from sqlalchemy import String + from datetime import datetime from sqlalchemy.sql.expression import true @@ -17,7 +15,6 @@ def __init__(self, study): __tablename__ = "dataset" id = db.Column(db.CHAR(36), primary_key=True) - # latest_version = db.Column(db.String, nullable=False) updated_on = db.Column(db.DateTime, nullable=False) created_at = db.Column(db.DateTime, nullable=False) diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py 
index ec075595..8618e0ff 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -1,8 +1,5 @@ import uuid -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY - from .db import db diff --git a/model/dataset_version.py b/model/dataset_version.py index a2fbf21f..aa66f70a 100644 --- a/model/dataset_version.py +++ b/model/dataset_version.py @@ -38,11 +38,11 @@ def to_dict(self): return { "id": self.id, "title": self.title, - "changelog": self.published, + "changelog": self.changelog, "published_on": str(datetime.now()), "created_at": str(datetime.now()), - "published": self.published, "doi": self.doi, + "published": self.published, "participants": [p.id for p in self.participants], } diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index c302ef44..b79d76da 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,7 +1,4 @@ import uuid - -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY from datetime import datetime from .db import db @@ -11,7 +8,7 @@ def __init__(self): self.id = str(uuid.uuid4()) __tablename__ = "invited_study_contributor" - email_address = db.Column(String, nullable=False, primary_key=True) + email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) invited_on = db.Column(db.DateTime, nullable=False) diff --git a/model/study.py b/model/study.py index f0a19908..247b5969 100644 --- a/model/study.py +++ b/model/study.py @@ -1,8 +1,6 @@ import uuid from datetime import datetime -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY import model diff --git a/model/study_contributor.py b/model/study_contributor.py index d98d0a08..29a629b0 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,6 +1,4 @@ import uuid -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import 
ARRAY from .db import db From 53575a710fc574626ea259f26c91ca03552d22ff Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 15 Aug 2023 17:54:32 -0700 Subject: [PATCH 018/505] fix: api for dataset and participant --- apis/dataset.py | 87 ++++++++++++++------------------------------- apis/participant.py | 2 +- 2 files changed, 27 insertions(+), 62 deletions(-) diff --git a/apis/dataset.py b/apis/dataset.py index aa5bd747..a7f4df97 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -6,57 +6,28 @@ api = Namespace("dataset", description="dataset operations", path="/") -dataset = api.model( - "Dataset", - { - "id": fields.String(required=True), - "last_modified": fields.String(required=True), - "last_published": fields.String(required=True), - "latest_version": fields.String(required=True), - "name": fields.String(required=True), - "published_version": fields.String(required=True), - }, -) - -contributors = api.model( - "Contributors", +dataset_versions_model = api.model( + "DatasetVersion", { "id": fields.String(required=True), - "affiliations": fields.String(required=True), - "email": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "orcid": fields.String(required=True), - "roles": fields.List(fields.String, required=True), - "permission": fields.String(required=True), - "status": fields.String(required=True), - }, -) - -participants = api.model( - "Participants", - { - "id": fields.Boolean(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "address": fields.String(required=True), - "age": fields.String(required=True), + "title": fields.String(required=True), + "changelog": fields.String(required=True), + "created_at": fields.String(required=True), + "doi": fields.String(required=True), + "published": fields.Boolean(required=True), + "participants": fields.List(fields.String, required=True), + "published_on": fields.String(required=True) 
}, ) - -dataset_version = api.model( - "DatasetVersion", +dataset = api.model( + "Dataset", { "id": fields.String(required=True), - "title": fields.String(required=True), - "description": fields.String(required=True), - "keywords": fields.String(required=True), - "primary_language": fields.String(required=True), - "modified": fields.DateTime(required=True), - "published": fields.Boolean(required=True), - "doi": fields.String(required=True), - "name": fields.String(required=True), + "updated_on": fields.String(required=True), + "created_at": fields.String(required=True), + "dataset_versions": fields.Nested(dataset_versions_model, required=True), + "latest_version": fields.String(required=True) }, ) @@ -66,33 +37,27 @@ class AddDataset(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("add dataset", params={"id": "An ID"}) - # @api.marshal_list_with(dataset) + @api.marshal_with(dataset) # @api.expect(body=dataset) def get(self, study_id): study = Study.query.get(study_id) datasets = Dataset.query.filter_by(study=study) - return jsonify([d.to_dict() for d in datasets]) + return [d.to_dict() for d in datasets] @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("update dataset") + @api.marshal_with(dataset) def post(self, study_id): data = request.json study = Study.query.get(study_id) - # study_id # todo if study.participant id== different study Throw error - # query based on part id and study id (prolly filter but need to find right syntax) - # data["participants"] = [(Participant.filter_by(i="study_id".first()),Participant.filter_by - # (i="participant_id".first()) )for i in data["participants"]] - data["participants"] = [ - Participant.query.get(i).first() for i in data["participants"] - ] dataset_obj = Dataset(study) - dataset_version = DatasetVersion.from_data(dataset_obj, data) + dataset_versions = DatasetVersion.from_data(dataset_obj, data) db.session.add(dataset_obj) - 
db.session.add(dataset_version) + db.session.add(dataset_versions) db.session.commit() - return dataset_version.to_dict() + return dataset_versions.to_dict() @api.route("/study//dataset//version/") @@ -101,10 +66,10 @@ class UpdateDataset(Resource): @api.response(400, "Validation Error") @api.doc("dataset version") @api.param("id", "Adding version") - # @api.marshal_with(dataset_version) + @api.marshal_with(dataset_versions_model) def get(self, study_id, dataset_id, version_id): dataset_version = DatasetVersion.query.get(version_id) - return jsonify(dataset_version.to_dict()) + return dataset_version.to_dict() def put(self, study_id, dataset_id, version_id): data_version_obj = DatasetVersion.query.get(version_id) @@ -130,10 +95,10 @@ def post(self, study_id: int, dataset_id: int): data = request.json data["participants"] = [Participant.query.get(i) for i in data["participants"]] data_obj = Dataset.query.get(dataset_id) - dataset_version = DatasetVersion.from_data(data_obj, data) - db.session.add(dataset_version) + dataset_versions = DatasetVersion.from_data(data_obj, data) + db.session.add(dataset_versions) db.session.commit() - return jsonify(dataset_version.to_dict()) + return jsonify(dataset_versions.to_dict()) # TODO not finalized endpoint. 
have to set functionality diff --git a/apis/participant.py b/apis/participant.py index dbcf2a42..de131ad4 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -5,7 +5,7 @@ api = Namespace("participant", description="participant operations", path="/") -participants = api.model( +participant_model = api.model( "Study", { "id": fields.String(required=True), From d370d655eb36eafbbb19ab87d8357610218b1200 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 15 Aug 2023 17:56:05 -0700 Subject: [PATCH 019/505] feat: added all dataset models --- model/dataset/dataset_access.py | 38 +++++++++++++ model/dataset/dataset_consent.py | 53 +++++++++++++++++++ .../dataset_contributor_affiliation.py | 34 ++++++++++++ model/dataset/dataset_date.py | 36 +++++++++++++ model/dataset/dataset_de_ident_level.py | 51 ++++++++++++++++++ model/dataset/dataset_description.py | 33 ++++++++++++ model/dataset/dataset_funder.py | 47 ++++++++++++++++ model/dataset/dataset_identifier.py | 34 ++++++++++++ .../dataset/dataset_managing_organization.py | 35 ++++++++++++ model/dataset/dataset_other.py | 41 ++++++++++++++ model/dataset/dataset_record_keys.py | 32 +++++++++++ .../dataset/dataset_related_item/__init__.py | 0 .../dataset_related_item.py | 31 +++++++++++ .../dataset_related_item_contributor.py | 37 +++++++++++++ .../dataset_related_item_identifier.py | 40 ++++++++++++++ .../dataset_related_item_other.py | 53 +++++++++++++++++++ .../dataset_related_item_title.py | 31 +++++++++++ model/dataset/dataset_rights.py | 40 ++++++++++++++ model/dataset/dataset_title.py | 32 +++++++++++ 19 files changed, 698 insertions(+) create mode 100644 model/dataset/dataset_access.py create mode 100644 model/dataset/dataset_consent.py create mode 100644 model/dataset/dataset_contributor_affiliation.py create mode 100644 model/dataset/dataset_date.py create mode 100644 model/dataset/dataset_de_ident_level.py create mode 100644 model/dataset/dataset_description.py create mode 100644 
model/dataset/dataset_funder.py create mode 100644 model/dataset/dataset_identifier.py create mode 100644 model/dataset/dataset_managing_organization.py create mode 100644 model/dataset/dataset_other.py create mode 100644 model/dataset/dataset_record_keys.py create mode 100644 model/dataset/dataset_related_item/__init__.py create mode 100644 model/dataset/dataset_related_item/dataset_related_item.py create mode 100644 model/dataset/dataset_related_item/dataset_related_item_contributor.py create mode 100644 model/dataset/dataset_related_item/dataset_related_item_identifier.py create mode 100644 model/dataset/dataset_related_item/dataset_related_item_other.py create mode 100644 model/dataset/dataset_related_item/dataset_related_item_title.py create mode 100644 model/dataset/dataset_rights.py create mode 100644 model/dataset/dataset_title.py diff --git a/model/dataset/dataset_access.py b/model/dataset/dataset_access.py new file mode 100644 index 00000000..297bb628 --- /dev/null +++ b/model/dataset/dataset_access.py @@ -0,0 +1,38 @@ +import uuid +from ..db import db + +class DatasetAccess(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_access" + id = db.Column(db.CHAR(36), primary_key=True) + type = db.Column(db.String, nullable=False) + description = db.Column(db.String, nullable=False) + url = db.Column(db.String, nullable=False) + url_last_checked = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_access" + ) + + def to_dict(self): + return { + "id": self.id, + "type": self.destypecription, + "description": self.description, + "url": self.url, + "url_last_checked": self.url_last_checked, + + } + + @staticmethod + def from_data(data: dict): + dataset_access = DatasetAccess() + + dataset_access.description = data["description"] + dataset_access.url = data["url"] + dataset_access.url_last_checked = 
data["url_last_checked"] + dataset_access.type = data["type"] + return dataset_access diff --git a/model/dataset/dataset_consent.py b/model/dataset/dataset_consent.py new file mode 100644 index 00000000..64132b12 --- /dev/null +++ b/model/dataset/dataset_consent.py @@ -0,0 +1,53 @@ +import uuid +from ..db import db + +class DatasetConsent(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_consent" + id = db.Column(db.CHAR(36), primary_key=True) + + type = db.Column(db.String, nullable=False) + noncommercial = db.Column(db.Boolean, nullable=False) + geog_restrict = db.Column(db.Boolean, nullable=False) + research_type = db.Column(db.Boolean, nullable=False) + genetic_only = db.Column(db.Boolean, nullable=False) + no_methods = db.Column(db.Boolean, nullable=False) + details = db.Column(db.Boolean, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_consent" + ) + + def to_dict(self): + return { + "id": self.id, + "type": self.destypecription, + "noncommercial": self.noncommercial, + "geog_restrict": self.geog_restrict, + "research_type": self.research_type, + "genetic_only": self.genetic_only, + "no_methods": self.no_methods, + "details": self.details, + + + + } + + @staticmethod + def from_data(data: dict): + dataset_consent = DatasetConsent() + dataset_consent.type = data["type"] + dataset_consent.noncommercial = data["noncommercial"] + dataset_consent.geog_restrict = data["geog_restrict"] + dataset_consent.research_type = data["research_type"] + dataset_consent.genetic_only = data["genetic_only"] + dataset_consent.no_methods = data["no_methods"] + dataset_consent.details = data["details"] + + return dataset_consent + + + diff --git a/model/dataset/dataset_contributor_affiliation.py b/model/dataset/dataset_contributor_affiliation.py new file mode 100644 index 00000000..3af4ffe8 --- /dev/null +++ 
b/model/dataset/dataset_contributor_affiliation.py @@ -0,0 +1,34 @@ +import uuid +from ..db import db + +class DatasetContributorAffiliation(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_contributor_affiliation" + id = db.Column(db.CHAR(36), primary_key=True) + identifier = db.Column(db.String, nullable=False) + identifier_scheme = db.Column(db.String, nullable=False) + identifier_scheme_uri = db.Column(db.String, nullable=False) + dataset_contributor = db.relationship( + "DatasetContributor", back_populates="dataset_contributor_affiliation" + ) + contributor_id = db.Column(db.String, db.ForeignKey("dataset.id")) + + + def to_dict(self): + return { + "id": self.id, + "identifier": self.identifier, + "identifier_scheme": self.identifier_scheme, + "identifier_scheme_uri": self.identifier_scheme_uri, + } + + @staticmethod + def from_data(data: dict): + dataset_contributor = DatasetContributorAffiliation() + # dataset_contributor.id = data["id"] + dataset_contributor.name_identifier = data["identifier"] + dataset_contributor.name_identifier_scheme = data["identifier_scheme"] + dataset_contributor.name_identifier_scheme_uri = data["identifier_scheme_uri"] + return dataset_contributor diff --git a/model/dataset/dataset_date.py b/model/dataset/dataset_date.py new file mode 100644 index 00000000..b885528c --- /dev/null +++ b/model/dataset/dataset_date.py @@ -0,0 +1,36 @@ +import uuid +from ..db import db + +class DatasetDate(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_date" + id = db.Column(db.CHAR(36), primary_key=True) + date = db.Column(db.String, nullable=False) + date_type = db.Column(db.String, nullable=False) + data_information = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_date" + ) + + + + def to_dict(self): + return { + "id": self.id, + "date": 
self.date, + "date_type": self.date_type, + "data_information": self.data_information, + } + + @staticmethod + def from_data(data: dict): + dataset_date = DatasetDate() + # dataset_contributor.id = data["id"] + dataset_date.date = data["date"] + dataset_date.date_type = data["date_type"] + dataset_date.data_information = data["data_information"] + return dataset_date diff --git a/model/dataset/dataset_de_ident_level.py b/model/dataset/dataset_de_ident_level.py new file mode 100644 index 00000000..08fdec94 --- /dev/null +++ b/model/dataset/dataset_de_ident_level.py @@ -0,0 +1,51 @@ +import uuid +from ..db import db + +class DatasetDeIdentLevel(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "datasedataset_de_ident_level" + id = db.Column(db.CHAR(36), primary_key=True) + + type = db.Column(db.String, nullable=False) + direct = db.Column(db.Boolean, nullable=False) + hipaa = db.Column(db.Boolean, nullable=False) + dates = db.Column(db.Boolean, nullable=False) + nonarr = db.Column(db.Boolean, nullable=False) + k_anon = db.Column(db.Boolean, nullable=False) + details = db.Column(db.Boolean, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_de_ident_level" + ) + + def to_dict(self): + return { + "id": self.id, + "type": self.type, + "direct": self.direct, + "hipaa": self.hipaa, + "dates": self.dates, + "nonarr": self.nonarr, + "k_anon": self.k_anon, + "details": self.details, + + } + + @staticmethod + def from_data(data: dict): + dataset_de_ident_level = DatasetDeIdentLevel() + dataset_de_ident_level.type = data["type"] + dataset_de_ident_level.direct = data["direct"] + dataset_de_ident_level.hipaa = data["hipaa"] + dataset_de_ident_level.dates = data["dates"] + dataset_de_ident_level.nonarr = data["nonarr"] + dataset_de_ident_level.k_anon = data["k_anon"] + dataset_de_ident_level.details = data["details"] + + return 
dataset_de_ident_level + + + diff --git a/model/dataset/dataset_description.py b/model/dataset/dataset_description.py new file mode 100644 index 00000000..b265c915 --- /dev/null +++ b/model/dataset/dataset_description.py @@ -0,0 +1,33 @@ +import uuid +from ..db import db + + +class DatasetContributorAffiliation(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_contributor_affiliation" + id = db.Column(db.CHAR(36), primary_key=True) + description = db.Column(db.String, nullable=False) + description_type = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_contributors" + ) + + def to_dict(self): + return { + "id": self.id, + "description": self.description, + "description_type": self.description_type, + } + + @staticmethod + def from_data(data: dict): + dataset_contributor = DatasetContributorAffiliation() + # dataset_contributor.id = data["id"] + dataset_contributor.description = data["description"] + dataset_contributor.description_type = data["description_type"] + dataset_contributor.name_identifier_scheme_uri = data["identifier_scheme_uri"] + return dataset_contributor diff --git a/model/dataset/dataset_funder.py b/model/dataset/dataset_funder.py new file mode 100644 index 00000000..f907cbd9 --- /dev/null +++ b/model/dataset/dataset_funder.py @@ -0,0 +1,47 @@ +import uuid +from ..db import db + + +class DatasetFunder(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_funder" + id = db.Column(db.CHAR(36), primary_key=True) + name = db.Column(db.String, nullable=False) + identifier = db.Column(db.String, nullable=False) + identifier_type = db.Column(db.String, nullable=False) + identifier_scheme_uri = db.Column(db.String, nullable=False) + award_number = db.Column(db.String, nullable=False) + award_uri = db.Column(db.String, nullable=False) + award_title = 
db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_funder" + ) + + def to_dict(self): + return { + "id": self.id, + "name": self.name, + "identifier": self.identifier, + "identifier_type": self.identifier_type, + "identifier_scheme_uri": self.identifier_scheme_uri, + "award_number": self.award_number, + "award_uri": self.award_uri, + "award_title": self.award_title, + } + + @staticmethod + def from_data(data: dict): + dataset_funder = DatasetFunder() + dataset_funder.name = data["name"] + dataset_funder.identifier = data["identifier"] + dataset_funder.identifier_type = data["identifier_type"] + dataset_funder.identifier_scheme_uri = data["identifier_scheme_uri"] + dataset_funder.award_number = data["award_number"] + dataset_funder.award_uri = data["award_uri"] + dataset_funder.award_title = data["award_title"] + + return dataset_funder diff --git a/model/dataset/dataset_identifier.py b/model/dataset/dataset_identifier.py new file mode 100644 index 00000000..ae65f53b --- /dev/null +++ b/model/dataset/dataset_identifier.py @@ -0,0 +1,34 @@ +import uuid +from ..db import db + +class DatasetIdentifier(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_identifier" + id = db.Column(db.CHAR(36), primary_key=True) + identifier = db.Column(db.String, nullable=False) + identifier_type = db.Column(db.String, nullable=False) + alternate = db.Column(db.Boolean, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_identifier" + ) + + def to_dict(self): + return { + "id": self.id, + "identifier": self.identifier, + "identifier_type": self.identifier_type, + "alternate": self.alternate, + } + + @staticmethod + def from_data(data: dict): + dataset_date = DatasetIdentifier() + + dataset_date.identifier = data["identifier"] + 
dataset_date.identifier_type = data["identifier_type"] + dataset_date.alternate = data["alternate"] + return dataset_date diff --git a/model/dataset/dataset_managing_organization.py b/model/dataset/dataset_managing_organization.py new file mode 100644 index 00000000..93e91b70 --- /dev/null +++ b/model/dataset/dataset_managing_organization.py @@ -0,0 +1,35 @@ +import uuid +from ..db import db + + +class DatasetManagingOrganization(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_managing_organization" + id = db.Column(db.CHAR(36), primary_key=True) + + name = db.Column(db.String, nullable=False) + ror_id = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_managing_organization" + ) + + def to_dict(self): + return { + "id": self.id, + "name": self.name, + "ror_id": self.ror_id, + } + + @staticmethod + def from_data(data: dict): + dataset_managing_organization = DatasetManagingOrganization() + dataset_managing_organization.name = data["name"] + dataset_managing_organization.ror_id = data["ror_id"] + return dataset_managing_organization + + + diff --git a/model/dataset/dataset_other.py b/model/dataset/dataset_other.py new file mode 100644 index 00000000..9aea3a6e --- /dev/null +++ b/model/dataset/dataset_other.py @@ -0,0 +1,41 @@ +import uuid +from ..db import db + + +class DatasetRights(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_other" + id = db.Column(db.CHAR(36), primary_key=True) + + language = db.Column(db.String, nullable=False) + managing_organization_name = db.Column(db.String, nullable=False) + managing_organization_ror_id = db.Column(db.String, nullable=False) + size = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_other" + ) 
+ + def to_dict(self): + return { + "id": self.id, + "language": self.language, + "managing_organization_name": self.managing_organization_name, + "managing_organization_ror_id": self.managing_organization_ror_id, + "size": self.size, + } + + @staticmethod + def from_data(data: dict): + dataset_other = DatasetRights() + dataset_other.language = data["language"] + dataset_other.managing_organization_name = data["managing_organization_name"] + dataset_other.managing_organization_ror_id = data["managing_organization_ror_id"] + dataset_other.size = data["size"] + return dataset_other + + + diff --git a/model/dataset/dataset_record_keys.py b/model/dataset/dataset_record_keys.py new file mode 100644 index 00000000..90f0a0c7 --- /dev/null +++ b/model/dataset/dataset_record_keys.py @@ -0,0 +1,32 @@ +import uuid +from ..db import db + + +class DatasetRecordsKeys(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_record_keys" + id = db.Column(db.CHAR(36), primary_key=True) + key_type = db.Column(db.String, nullable=False) + key_details = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_record_keys" + ) + + def to_dict(self): + return { + "id": self.id, + "key_type": self.key_type, + "key_details": self.key_details, + } + + @staticmethod + def from_data(data: dict): + dataset_record_keys = DatasetRecordsKeys() + + dataset_record_keys.key_type = data["key_type"] + dataset_record_keys.key_details = data["key_details"] + return dataset_record_keys diff --git a/model/dataset/dataset_related_item/__init__.py b/model/dataset/dataset_related_item/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/model/dataset/dataset_related_item/dataset_related_item.py b/model/dataset/dataset_related_item/dataset_related_item.py new file mode 100644 index 00000000..f52aeb01 --- /dev/null +++ 
b/model/dataset/dataset_related_item/dataset_related_item.py @@ -0,0 +1,31 @@ +import uuid +from ..db import db + + +class DatasetRelatedItem(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_related_item" + id = db.Column(db.CHAR(36), primary_key=True) + type = db.Column(db.String, nullable=False) + relation_type = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_related_item" + ) + + def to_dict(self): + return { + "id": self.id, + "type": self.type, + "relation_type": self.relation_type, + } + + @staticmethod + def from_data(data: dict): + dataset_related_item = DatasetRelatedItem() + dataset_related_item.type = data["type"] + dataset_related_item.relation_type = data["relation_type"] + return dataset_related_item diff --git a/model/dataset/dataset_related_item/dataset_related_item_contributor.py b/model/dataset/dataset_related_item/dataset_related_item_contributor.py new file mode 100644 index 00000000..807f86ef --- /dev/null +++ b/model/dataset/dataset_related_item/dataset_related_item_contributor.py @@ -0,0 +1,37 @@ +import uuid +from ..db import db + + +class DatasetRelatedItemContributor(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_related_item_contributor" + id = db.Column(db.CHAR(36), primary_key=True) + name = db.Column(db.String, nullable=False) + name_type = db.Column(db.String, nullable=False) + creator = db.Column(db.Boolean, nullable=False) + contributor_type = db.Column(db.String, nullable=False) + + dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item = db.relationship( + "DatasetRelatedItem", back_populates="dataset_related_item_contributor" + ) + + def to_dict(self): + return { + "id": self.id, + "name": self.name, + "name_type": self.name_type, + "creator": self.creator, + 
"contributor_type": self.contributor_type, + } + + @staticmethod + def from_data(data: dict): + dataset_related_contributor = DatasetRelatedItemContributor() + dataset_related_contributor.name = data["name"] + dataset_related_contributor.name_type = data["name_type"] + dataset_related_contributor.creator = data["creator"] + dataset_related_contributor.contributor_type = data["contributor_type"] + return dataset_related_contributor diff --git a/model/dataset/dataset_related_item/dataset_related_item_identifier.py b/model/dataset/dataset_related_item/dataset_related_item_identifier.py new file mode 100644 index 00000000..a7c4c492 --- /dev/null +++ b/model/dataset/dataset_related_item/dataset_related_item_identifier.py @@ -0,0 +1,40 @@ +import uuid +from ..db import db + + +class DatasetRelatedItemIdentifier(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_related_item_identifier" + id = db.Column(db.CHAR(36), primary_key=True) + identifier = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=False) + metadata_scheme = db.Column(db.Boolean, nullable=False) + scheme_uri = db.Column(db.String, nullable=False) + scheme_type = db.Column(db.String, nullable=False) + + dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item = db.relationship( + "DatasetRelatedItem", back_populates="dataset_related_item_identifier" + ) + + def to_dict(self): + return { + "id": self.id, + "identifier": self.identifier, + "type": self.type, + "metadata_scheme": self.metadata_scheme, + "scheme_uri": self.scheme_uri, + "scheme_type": self.scheme_type + } + + @staticmethod + def from_data(data: dict): + dataset_related_item_identifier = DatasetRelatedItemIdentifier() + dataset_related_item_identifier.identifier = data["identifier"] + dataset_related_item_identifier.type = data["type"] + dataset_related_item_identifier.metadata_scheme = data["metadata_scheme"] + 
dataset_related_item_identifier.scheme_uri = data["scheme_uri"] + dataset_related_item_identifier.scheme_type = data["scheme_type"] + return dataset_related_item_identifier diff --git a/model/dataset/dataset_related_item/dataset_related_item_other.py b/model/dataset/dataset_related_item/dataset_related_item_other.py new file mode 100644 index 00000000..d5a3626d --- /dev/null +++ b/model/dataset/dataset_related_item/dataset_related_item_other.py @@ -0,0 +1,53 @@ +import uuid +from ..db import db + + +class DatasetRelatedItemOther(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_related_item_other" + id = db.Column(db.CHAR(36), primary_key=True) + publication_year = db.Column(db.String, nullable=False) + volume = db.Column(db.String, nullable=False) + issue = db.Column(db.Boolean, nullable=False) + number_value = db.Column(db.String, nullable=False) + number_type = db.Column(db.String, nullable=False) + first_page = db.Column(db.String, nullable=False) + last_page = db.Column(db.Boolean, nullable=False) + publisher = db.Column(db.String, nullable=False) + edition = db.Column(db.String, nullable=False) + + dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item = db.relationship( + "DatasetRelatedItem", back_populates="dataset_related_item_other" + ) + + def to_dict(self): + return { + "id": self.id, + "publication_year": self.publication_year, + "volume": self.volume, + "issue": self.issue, + "number_value": self.number_value, + "number_type": self.number_type, + "first_page": self.first_page, + "last_page": self.last_page, + "publisher": self.publisher, + "edition": self.edition, + "scheme_type": self.scheme_type + } + + @staticmethod + def from_data(data: dict): + dataset_related_item_other = DatasetRelatedItemOther() + dataset_related_item_other.publication_year = data["publication_year"] + dataset_related_item_other.volume = data["volume"] + 
dataset_related_item_other.issue = data["issue"] + dataset_related_item_other.number_value = data["number_value"] + dataset_related_item_other.number_type = data["number_type"] + dataset_related_item_other.first_page = data["first_page"] + dataset_related_item_other.last_page = data["last_page"] + dataset_related_item_other.publisher = data["publisher"] + dataset_related_item_other.edition = data["edition"] + return dataset_related_item_other diff --git a/model/dataset/dataset_related_item/dataset_related_item_title.py b/model/dataset/dataset_related_item/dataset_related_item_title.py new file mode 100644 index 00000000..4bafe491 --- /dev/null +++ b/model/dataset/dataset_related_item/dataset_related_item_title.py @@ -0,0 +1,31 @@ +import uuid +from ..db import db + + +class DatasetRelatedItemTitle(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_related_item_title" + id = db.Column(db.CHAR(36), primary_key=True) + type = db.Column(db.String, nullable=False) + title = db.Column(db.String, nullable=False) + + dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item = db.relationship( + "DatasetRelatedItem", back_populates="dataset_related_item_title" + ) + + def to_dict(self): + return { + "id": self.id, + "type": self.type, + "title": self.title, + } + + @staticmethod + def from_data(data: dict): + dataset_related_item_title = DatasetRelatedItemTitle() + dataset_related_item_title.type = data["type"] + dataset_related_item_title.title = data["title"] + return dataset_related_item_title diff --git a/model/dataset/dataset_rights.py b/model/dataset/dataset_rights.py new file mode 100644 index 00000000..0ad40ba8 --- /dev/null +++ b/model/dataset/dataset_rights.py @@ -0,0 +1,40 @@ +import uuid +from ..db import db + +class DatasetRights(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_rights" + id = db.Column(db.CHAR(36), 
primary_key=True) + + rights = db.Column(db.String, nullable=False) + uri = db.Column(db.String, nullable=False) + identifier = db.Column(db.String, nullable=False) + identifier_scheme = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_rights" + ) + + def to_dict(self): + return { + "id": self.id, + "rights": self.rights, + "uri": self.uri, + "identifier": self.identifier, + "identifier_scheme": self.identifier_scheme, + } + + @staticmethod + def from_data(data: dict): + dataset_rights = DatasetRights() + dataset_rights.rights = data["rights"] + dataset_rights.uri = data["uri"] + dataset_rights.identifier = data["identifier"] + dataset_rights.identifier_scheme = data["identifier_scheme"] + return dataset_rights + + + diff --git a/model/dataset/dataset_title.py b/model/dataset/dataset_title.py new file mode 100644 index 00000000..43caca7d --- /dev/null +++ b/model/dataset/dataset_title.py @@ -0,0 +1,32 @@ +import uuid +from ..db import db + + +class DatasetTitle(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_contributor_affiliation" + id = db.Column(db.CHAR(36), primary_key=True) + title = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=False) + dataset_contributor = db.relationship( + "DatasetContributor", back_populates="dataset_title" + ) + contributor_id = db.Column(db.String, db.ForeignKey("dataset.id")) + + + def to_dict(self): + return { + "id": self.id, + "title": self.title, + "type": self.type, + } + + @staticmethod + def from_data(data: dict): + dataset_contributor = DatasetTitle() + # dataset_contributor.id = data["id"] + dataset_contributor.title = data["title"] + dataset_contributor.type = data["type"] + return dataset_contributor From c9eb4ef7e7d1c548980b88895478ee9453b63c1f Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 15 Aug 2023 17:57:02 -0700 Subject: 
[PATCH 020/505] feat: added all study models --- model/study/study_arm.py | 55 +++++++++++ model/study/study_avaliable_ipd.py | 49 +++++++++ model/study/study_contact.py | 62 ++++++++++++ model/study/study_description.py | 44 +++++++++ model/study/study_design.py | 104 ++++++++++++++++++++ model/study/study_eligibility.py | 68 +++++++++++++ model/study/study_intervention.py | 55 +++++++++++ model/study/study_ipdsharing.py | 60 +++++++++++ model/study/study_link.py | 44 +++++++++ model/study/study_location.py | 55 +++++++++++ model/study/study_other.py | 53 ++++++++++ model/study/study_overall_official.py | 50 ++++++++++ model/study/study_reference.py | 51 ++++++++++ model/study/study_sponsors_collaborators.py | 65 ++++++++++++ model/study/study_status.py | 59 +++++++++++ 15 files changed, 874 insertions(+) create mode 100644 model/study/study_arm.py create mode 100644 model/study/study_avaliable_ipd.py create mode 100644 model/study/study_contact.py create mode 100644 model/study/study_description.py create mode 100644 model/study/study_design.py create mode 100644 model/study/study_eligibility.py create mode 100644 model/study/study_intervention.py create mode 100644 model/study/study_ipdsharing.py create mode 100644 model/study/study_link.py create mode 100644 model/study/study_location.py create mode 100644 model/study/study_other.py create mode 100644 model/study/study_overall_official.py create mode 100644 model/study/study_reference.py create mode 100644 model/study/study_sponsors_collaborators.py create mode 100644 model/study/study_status.py diff --git a/model/study/study_arm.py b/model/study/study_arm.py new file mode 100644 index 00000000..9d824f14 --- /dev/null +++ b/model/study/study_arm.py @@ -0,0 +1,55 @@ +import uuid +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY + +from ..db import db + + +class StudyArm(db.Model): + """A study is a collection of datasets and participants""" + + def __init__(self): + self.id = 
str(uuid.uuid4()) + # self.created_at = datetime.now() + + __tablename__ = "study_arm" + + id = db.Column(db.CHAR(36), primary_key=True) + label = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=False) + description = db.Column(db.DateTime, nullable=False) + intervention_list = db.Column(ARRAY(String), nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study", back_populates="study_arm") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "label": self.label, + "type": self.type, + "description": str(self.description), + "intervention_list": self.intervention_list, + } + + @staticmethod + def from_data(data: dict): + """Creates a new study from a dictionary""" + study_arm = StudyArm() + study_arm.update(data) + + return study_arm + + def update(self, data): + """Updates the study from a dictionary""" + self.label = data["label"] + self.type = data["type"] + self.description = data["description"] + self.intervention_list = data["intervention_list"] + + + def validate(self): + """Validates the study""" + violations = [] + return violations diff --git a/model/study/study_avaliable_ipd.py b/model/study/study_avaliable_ipd.py new file mode 100644 index 00000000..4186ec0f --- /dev/null +++ b/model/study/study_avaliable_ipd.py @@ -0,0 +1,49 @@ +import uuid + +from ..db import db + + +class StudyAvailable(db.Model): + """A study is a collection of datasets and participants""" + + def __init__(self): + self.id = str(uuid.uuid4()) + __tablename__ = "study_available" + + id = db.Column(db.CHAR(36), primary_key=True) + identifier = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=False) + url = db.Column(db.String, nullable=False) + comment = db.Column(db.String, nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study", back_populates="study_available") + + def 
to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "identifier": self.identifier, + "type": self.type, + "url": self.url, + "comment": self.comment, + } + + @staticmethod + def from_data(data: dict): + """Creates a new study from a dictionary""" + study_available = StudyAvailable() + study_available.update(data) + return study_available + + def update(self, data): + """Updates the study from a dictionary""" + self.identifier = data["identifier"] + self.type = data["type"] + self.url = data["url"] + self.comment = data["comment"] + + def validate(self): + """Validates the study""" + violations = [] + return violations diff --git a/model/study/study_contact.py b/model/study/study_contact.py new file mode 100644 index 00000000..1ca0d531 --- /dev/null +++ b/model/study/study_contact.py @@ -0,0 +1,62 @@ +import uuid +from ..db import db + + +class StudyContact(db.Model): + """A study is a collection of datasets and participants""" + + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "study_contact" + + id = db.Column(db.CHAR(36), primary_key=True) + first_name = db.Column(db.String, nullable=False) + last_name = db.Column(db.String, nullable=False) + affiliation = db.Column(db.String, nullable=False) + role = db.Column(db.String, nullable=False) + phone = db.Column(db.String, nullable=False) + phone_ext = db.Column(db.String, nullable=False) + email_address = db.Column(db.String, nullable=False) + central_contact = db.Column(db.Boolean, nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study", back_populates="study_contact") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "first_name": self.first_name, + "last_name": self.last_name, + "affiliation": self.affiliation, + "role": self.role, + "phone": self.phone, + "phone_ext": self.phone_ext, + "email_address": self.email_address, + "central_contact": 
class StudyDescription(db.Model):
    """ORM model for a study's textual descriptions: a brief summary
    and a detailed description."""

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_description"

    id = db.Column(db.CHAR(36), primary_key=True)
    brief_summary = db.Column(db.String, nullable=False)
    detailed_description = db.Column(db.String, nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_description")

    def to_dict(self):
        """Serialize this description to a plain dictionary."""
        return {
            "id": self.id,
            "brief_summary": self.brief_summary,
            "detailed_description": self.detailed_description,
        }

    @staticmethod
    def from_data(data: dict):
        """Build a new description from a dictionary of field values."""
        description = StudyDescription()
        description.update(data)
        return description

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        for field in ("brief_summary", "detailed_description"):
            setattr(self, field, data[field])

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
class StudyDesign(db.Model):
    """ORM model for a study's design metadata (allocation, masking,
    phases, enrollment, observational model, biospecimens, ...).

    Fix: the original declared no ``intervention_list`` column even
    though both ``to_dict()`` and ``update()`` referenced
    ``self.intervention_list`` — ``to_dict()`` would raise
    AttributeError on any instance loaded from the database. The column
    is now declared, matching ``StudyArm.intervention_list``'s
    ARRAY(String) type.
    """

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_design"

    id = db.Column(db.CHAR(36), primary_key=True)
    design_allocation = db.Column(db.String, nullable=False)
    study_type = db.Column(db.String, nullable=False)
    design_interventional_model = db.Column(db.String, nullable=False)
    design_intervention_model_description = db.Column(db.String, nullable=False)
    design_primary_purpose = db.Column(db.String, nullable=False)
    design_masking = db.Column(db.String, nullable=False)
    design_masking_description = db.Column(db.String, nullable=False)
    design_who_masked_list = db.Column(ARRAY(String), nullable=False)
    phase_list = db.Column(ARRAY(String), nullable=False)
    enrollment_count = db.Column(db.String, nullable=False)
    enrollment_type = db.Column(db.String, nullable=False)
    number_arms = db.Column(db.Integer, nullable=False)
    design_observational_model_list = db.Column(ARRAY(String), nullable=False)
    design_time_perspective_list = db.Column(ARRAY(String), nullable=False)
    bio_spec_retention = db.Column(db.String, nullable=False)
    bio_spec_description = db.Column(db.String, nullable=False)
    target_duration = db.Column(db.String, nullable=False)
    number_groups_cohorts = db.Column(db.Integer, nullable=False)
    # Previously referenced by to_dict()/update() but never declared.
    intervention_list = db.Column(ARRAY(String), nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_design")

    def to_dict(self):
        """Serialize this design record to a plain dictionary.

        NOTE(review): a few fields are wrapped in ``str(...)``
        (``design_interventional_model``, ``design_who_masked_list``,
        ``design_observational_model_list``, ``number_groups_cohorts``)
        while similar fields are not — preserved as-is, but confirm
        whether the inconsistency is intentional.
        """
        return {
            "id": self.id,
            "design_allocation": self.design_allocation,
            "study_type": self.study_type,
            "design_interventional_model": str(self.design_interventional_model),
            "design_intervention_model_description": self.design_intervention_model_description,
            "design_primary_purpose": self.design_primary_purpose,
            "design_masking": self.design_masking,
            "design_masking_description": self.design_masking_description,
            "design_who_masked_list": str(self.design_who_masked_list),
            "phase_list": self.phase_list,
            "enrollment_count": self.enrollment_count,
            "enrollment_type": self.enrollment_type,
            "number_arms": self.number_arms,
            "design_observational_model_list": str(self.design_observational_model_list),
            "design_time_perspective_list": self.design_time_perspective_list,
            "bio_spec_retention": self.bio_spec_retention,
            "bio_spec_description": self.bio_spec_description,
            "target_duration": self.target_duration,
            "number_groups_cohorts": str(self.number_groups_cohorts),
            "intervention_list": self.intervention_list,
        }

    @staticmethod
    def from_data(data: dict):
        """Build a new design record from a dictionary of field values."""
        study_design = StudyDesign()
        study_design.update(data)
        return study_design

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        self.design_allocation = data["design_allocation"]
        self.study_type = data["study_type"]
        self.design_interventional_model = data["design_interventional_model"]
        self.design_intervention_model_description = data[
            "design_intervention_model_description"
        ]
        self.design_primary_purpose = data["design_primary_purpose"]
        self.design_masking = data["design_masking"]
        self.design_masking_description = data["design_masking_description"]
        self.design_who_masked_list = data["design_who_masked_list"]
        self.phase_list = data["phase_list"]
        self.enrollment_count = data["enrollment_count"]
        self.enrollment_type = data["enrollment_type"]
        self.number_arms = data["number_arms"]
        self.design_observational_model_list = data[
            "design_observational_model_list"
        ]
        self.design_time_perspective_list = data["design_time_perspective_list"]
        self.bio_spec_retention = data["bio_spec_retention"]
        self.bio_spec_description = data["bio_spec_description"]
        self.target_duration = data["target_duration"]
        self.number_groups_cohorts = data["number_groups_cohorts"]
        self.intervention_list = data["intervention_list"]

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
data["design_observational_model_list"] + self.design_time_perspective_list = data["design_time_perspective_list"] + self.bio_spec_retention = data["bio_spec_retention"] + self.bio_spec_description = data["bio_spec_description"] + self.target_duration = data["target_duration"] + self.number_groups_cohorts = data["number_groups_cohorts"] + self.intervention_list = data["intervention_list"] + + + + + + + + + + def validate(self): + """Validates the study""" + violations = [] + return violations diff --git a/model/study/study_eligibility.py b/model/study/study_eligibility.py new file mode 100644 index 00000000..8619f0d3 --- /dev/null +++ b/model/study/study_eligibility.py @@ -0,0 +1,68 @@ +import uuid +from ..db import db +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY + + +class StudyEligibility(db.Model): + """A study is a collection of datasets and participants""" + + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "study_eligibility" + + id = db.Column(db.CHAR(36), primary_key=True) + gender = db.Column(db.String, nullable=False) + gender_based = db.Column(db.Boolean, nullable=False) + gender_description = db.Column(db.String, nullable=False) + minimum_age = db.Column(db.String, nullable=False) + maximum_age = db.Column(db.Boolean, nullable=False) + inclusion_criteria = db.Column(ARRAY(String), nullable=False) + exclusion_criteria = db.Column(ARRAY(String), nullable=False) + study_population = db.Column(db.String, nullable=False) + sampling_method = db.Column(db.String, nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study", back_populates="study_eligibility") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "gender": self.gender, + "gender_based": self.gender_based, + "gender_description": self.gender_description, + "minimum_age": self.miminum_age, + "maximum_age": self.maximum_age, + 
"inclusion_criteria": self.inclusion_criteria, + "exclusion_criteria": self.exclusion_criteria, + "study_population": self.study_population, + "sampling_method": self.sampling_method, + + } + + @staticmethod + def from_data(data: dict): + """Creates a new study from a dictionary""" + study_eligibility = StudyEligibility() + study_eligibility.update(data) + + return study_eligibility + + def update(self, data): + """Updates the study from a dictionary""" + self.gender = data["gender"] + self.gender_based = data["gender_based"] + self.gender_description = data["gender_description"] + self.minimum_age = data["minimum_age"] + self.maximum_age = data["maximum_age"] + self.inclusion_criteria = data["inclusion_criteria"] + self.exclusion_criteria = data["exclusion_criteria"] + self.study_population = data["study_population"] + self.sampling_method = data["sampling_method"] + + def validate(self): + """Validates the lead_sponsor_last_name study""" + violations = [] + return violations diff --git a/model/study/study_intervention.py b/model/study/study_intervention.py new file mode 100644 index 00000000..7d096c90 --- /dev/null +++ b/model/study/study_intervention.py @@ -0,0 +1,55 @@ +import uuid +from ..db import db +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY + + +class StudyIntervention(db.Model): + """A study is a collection of datasets and participants""" + + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "study_intervention" + + id = db.Column(db.CHAR(36), primary_key=True) + type = db.Column(db.String, nullable=False) + name = db.Column(db.String, nullable=False) + description = db.Column(db.String, nullable=False) + arm_group_label_list = db.Column(ARRAY(String), nullable=False) + other_name_list = db.Column(ARRAY(String), nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study", back_populates="study_intervention") + + def to_dict(self): + """Converts 
the study to a dictionary""" + return { + "id": self.id, + "type": self.type, + "name": self.name, + "description": self.description, + "arm_group_label_list": self.arm_group_label_list, + "other_name_list": self.other_name_list, + } + + @staticmethod + def from_data(data: dict): + """Creates a new study from a dictionary""" + study_intervention = StudyIntervention() + study_intervention.update(data) + + return study_intervention + + def update(self, data): + """Updates the study from a dictionary""" + self.type = data["type"] + self.name = data["name"] + self.description = data["description"] + self.arm_group_label_list = data["arm_group_label_list"] + self.other_name_list = data["other_name_list"] + + def validate(self): + """Validates the lead_sponsor_last_name study""" + violations = [] + return violations diff --git a/model/study/study_ipdsharing.py b/model/study/study_ipdsharing.py new file mode 100644 index 00000000..d3c9ab1a --- /dev/null +++ b/model/study/study_ipdsharing.py @@ -0,0 +1,60 @@ +import uuid +from ..db import db +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY + + +class StudyIpdsharing(db.Model): + """A study is a collection of datasets and participants""" + + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "study_ipdsharing" + + id = db.Column(db.CHAR(36), primary_key=True) + ipd_sharing = db.Column(db.String, nullable=False) + ipd_sharing_description = db.Column(db.String, nullable=False) + ipd_sharing_info_type_list = db.Column(ARRAY(String), nullable=False) + ipd_sharing_time_frame = db.Column(db.String, nullable=False) + ipd_sharing_access_criteria = db.Column(db.String, nullable=False) + ipd_sharing_url = db.Column(db.String, nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study", back_populates="study_ipdsharing") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "ipd_sharing": 
class StudyLink(db.Model):
    """ORM model for an external link (URL + title) attached to a
    study."""

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_link"

    id = db.Column(db.CHAR(36), primary_key=True)
    url = db.Column(db.String, nullable=False)
    title = db.Column(db.String, nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_link")

    def to_dict(self):
        """Serialize this link to a plain dictionary."""
        return {"id": self.id, "url": self.url, "title": self.title}

    @staticmethod
    def from_data(data: dict):
        """Build a new link from a dictionary of field values."""
        link = StudyLink()
        link.update(data)
        return link

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        self.url = data["url"]
        self.title = data["title"]

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
class StudyLocation(db.Model):
    """ORM model for a study site location (facility, status and
    address components)."""

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_location"

    id = db.Column(db.CHAR(36), primary_key=True)
    facility = db.Column(db.String, nullable=False)
    status = db.Column(db.String, nullable=False)
    city = db.Column(db.String, nullable=False)
    state = db.Column(db.String, nullable=False)
    # NOTE: "zip" shadows the builtin at class scope; kept because it is
    # part of the model's public attribute/serialization interface.
    zip = db.Column(db.String, nullable=False)
    country = db.Column(db.String, nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_location")

    def to_dict(self):
        """Serialize this location to a plain dictionary."""
        fields = ("id", "facility", "status", "city", "state", "zip", "country")
        return {name: getattr(self, name) for name in fields}

    @staticmethod
    def from_data(data: dict):
        """Build a new location from a dictionary of field values."""
        location = StudyLocation()
        location.update(data)
        return location

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        for field in ("facility", "status", "city", "state", "zip", "country"):
            setattr(self, field, data[field])

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
class StudyOther(db.Model):
    """ORM model for miscellaneous study metadata: DMC oversight flag,
    condition and keyword lists, and size."""

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_other"

    id = db.Column(db.CHAR(36), primary_key=True)
    oversight_has_dmc = db.Column(db.Boolean, nullable=False)
    conditions = db.Column(ARRAY(String), nullable=False)
    keywords = db.Column(ARRAY(String), nullable=False)
    size = db.Column(db.String, nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_other")

    def to_dict(self):
        """Serialize this record to a plain dictionary."""
        return {
            "id": self.id,
            "oversight_has_dmc": self.oversight_has_dmc,
            "conditions": self.conditions,
            "keywords": self.keywords,
            "size": self.size,
        }

    @staticmethod
    def from_data(data: dict):
        """Build a new record from a dictionary of field values."""
        record = StudyOther()
        record.update(data)
        return record

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        for field in ("oversight_has_dmc", "conditions", "keywords", "size"):
            setattr(self, field, data[field])

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
class StudyOverall(db.Model):
    """ORM model for a study's overall official (investigator name,
    affiliation and role).

    NOTE(review): lives in ``study_overall_official.py`` but the class
    and table are named "overall" — confirm the naming is intentional.
    """

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_overall"

    id = db.Column(db.CHAR(36), primary_key=True)
    first_name = db.Column(db.String, nullable=False)
    last_name = db.Column(db.String, nullable=False)
    affiliation = db.Column(db.String, nullable=False)
    role = db.Column(db.String, nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_overall")

    def to_dict(self):
        """Serialize this official to a plain dictionary."""
        fields = ("id", "first_name", "last_name", "affiliation", "role")
        return {name: getattr(self, name) for name in fields}

    @staticmethod
    def from_data(data: dict):
        """Build a new official from a dictionary of field values."""
        official = StudyOverall()
        official.update(data)
        return official

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        for field in ("first_name", "last_name", "affiliation", "role"):
            setattr(self, field, data[field])

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
class StudyReference(db.Model):
    """ORM model for a literature/registry reference attached to a
    study (identifier, title, type and citation).

    Fix: ``type`` was declared ``db.Boolean`` although it is read and
    written as an ordinary value exactly like the String columns
    around it (``identifier``/``title``/``citation``) — changed to
    ``db.String``. NOTE(review): confirm no migration has shipped the
    Boolean column.
    """

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_reference"

    id = db.Column(db.CHAR(36), primary_key=True)
    identifier = db.Column(db.String, nullable=False)
    title = db.Column(db.String, nullable=False)
    type = db.Column(db.String, nullable=False)
    citation = db.Column(db.String, nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_reference")

    def to_dict(self):
        """Serialize this reference to a plain dictionary."""
        return {
            "id": self.id,
            "identifier": self.identifier,
            "title": self.title,
            "type": self.type,
            "citation": self.citation,
        }

    @staticmethod
    def from_data(data: dict):
        """Build a new reference from a dictionary of field values."""
        study_reference = StudyReference()
        study_reference.update(data)
        return study_reference

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        self.identifier = data["identifier"]
        self.title = data["title"]
        self.type = data["type"]
        self.citation = data["citation"]

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
"id": self.id, + "responsible_party_type": self.responsible_party_type, + "responsible_party_investigator_first_name": self.responsible_party_investigator_first_name, + "responsible_party_investigator_last_name": self.responsible_party_investigator_last_name, + "responsible_party_investigator_title": self.responsible_party_investigator_title, + "responsible_party_investigator_affiliation": self.responsible_party_investigator_affiliation, + "lead_sponsor_first_name": self.lead_sponsor_first_name, + "lead_sponsor_last_name": self.lead_sponsor_last_name, + "collaborator_name": self.collaborator_name, + + } + + @staticmethod + def from_data(data: dict): + """Creates a new study from a dictionary""" + study_sponsors_collaborators = StudySponsorsCollaborators() + study_sponsors_collaborators.update(data) + + return study_sponsors_collaborators + + def update(self, data): + """Updates the study from a dictionary""" + self.responsible_party_type = data["responsible_party_type"] + self.responsible_party_investigator_first_name = data["responsible_party_investigator_first_name"] + self.responsible_party_investigator_last_name = data["responsible_party_investigator_last_name"] + self.responsible_party_investigator_title = data["responsible_party_investigator_title"] + self.responsible_party_investigator_affiliation = data["responsible_party_investigator_affiliation"] + self.lead_sponsor_first_name = data["lead_sponsor_first_name"] + self.lead_sponsor_last_name = data["lead_sponsor_last_name"] + self.collaborator_name = data["collaborator_name"] + + def validate(self): + """Validates the lead_sponsor_last_name study""" + violations = [] + return violations diff --git a/model/study/study_status.py b/model/study/study_status.py new file mode 100644 index 00000000..55e77080 --- /dev/null +++ b/model/study/study_status.py @@ -0,0 +1,59 @@ +import uuid + +from ..db import db + + +class StudyStatus(db.Model): + """A study is a collection of datasets and participants""" + + def 
class StudyStatus(db.Model):
    """ORM model for a study's recruitment/completion status and the
    associated dates.

    Fix: ``update()`` read keys copied from an unrelated model
    (``title``, ``image``, ``created_at``, ``updated_on``) and even
    assigned ``data["title"]``/``data["image"]`` twice to different
    columns — it now reads the keys that match the columns it sets and
    that ``to_dict()`` emits.
    """

    def __init__(self):
        # IDs are generated client-side as UUID4 strings.
        self.id = str(uuid.uuid4())

    __tablename__ = "study_status"

    id = db.Column(db.CHAR(36), primary_key=True)
    overall_status = db.Column(db.String, nullable=False)
    why_stopped = db.Column(db.String, nullable=False)
    start_date = db.Column(db.DateTime, nullable=False)
    start_date_type = db.Column(db.String, nullable=False)
    completion_date = db.Column(db.DateTime, nullable=False)
    completion_date_type = db.Column(db.String, nullable=False)

    # Many-to-one link back to the owning Study row.
    study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"))
    study = db.relationship("Study", back_populates="study_status")

    def to_dict(self):
        """Serialize this status to a plain dictionary (dates are
        rendered with str())."""
        return {
            "id": self.id,
            "overall_status": self.overall_status,
            "why_stopped": self.why_stopped,
            "start_date": str(self.start_date),
            "start_date_type": self.start_date_type,
            "completion_date": str(self.completion_date),
            "completion_date_type": self.completion_date_type,
        }

    @staticmethod
    def from_data(data: dict):
        """Build a new status from a dictionary of field values."""
        study_status = StudyStatus()
        study_status.update(data)
        return study_status

    def update(self, data):
        """Overwrite the mutable fields from *data* (KeyError if a key
        is missing)."""
        self.overall_status = data["overall_status"]
        self.why_stopped = data["why_stopped"]
        self.start_date = data["start_date"]
        self.start_date_type = data["start_date_type"]
        self.completion_date = data["completion_date"]
        self.completion_date_type = data["completion_date_type"]

    def validate(self):
        """Return a list of validation violations (currently none)."""
        violations = []
        return violations
model/dataset/dataset_access.py | 6 ++---- model/dataset/dataset_consent.py | 11 ++--------- model/dataset/dataset_contributor_affiliation.py | 2 +- model/dataset/dataset_date.py | 7 ++----- model/dataset/dataset_de_ident_level.py | 9 ++------- model/dataset/dataset_description.py | 4 +--- model/dataset/dataset_funder.py | 4 +--- model/dataset/dataset_identifier.py | 5 ++--- model/dataset/dataset_managing_organization.py | 7 +------ model/dataset/dataset_other.py | 11 ++++------- model/dataset/dataset_record_keys.py | 4 +--- .../dataset_related_item/dataset_related_item.py | 4 +--- .../dataset_related_item_contributor.py | 4 +++- .../dataset_related_item_identifier.py | 6 ++++-- .../dataset_related_item_other.py | 6 ++++-- .../dataset_related_item_title.py | 4 +++- model/dataset/dataset_rights.py | 8 ++------ model/dataset/dataset_title.py | 1 - model/dataset_contributor.py | 10 ++++------ model/dataset_version.py | 2 -- model/participant.py | 2 +- model/study.py | 4 +++- model/user.py | 3 ++- 26 files changed, 56 insertions(+), 84 deletions(-) diff --git a/apis/dataset.py b/apis/dataset.py index a7f4df97..0bdc01fb 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -16,7 +16,7 @@ "doi": fields.String(required=True), "published": fields.Boolean(required=True), "participants": fields.List(fields.String, required=True), - "published_on": fields.String(required=True) + "published_on": fields.String(required=True), }, ) @@ -27,7 +27,7 @@ "updated_on": fields.String(required=True), "created_at": fields.String(required=True), "dataset_versions": fields.Nested(dataset_versions_model, required=True), - "latest_version": fields.String(required=True) + "latest_version": fields.String(required=True), }, ) diff --git a/model/__init__.py b/model/__init__.py index b254806d..371800ec 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -21,5 +21,5 @@ "User", "DatasetContributor", "StudyInvitedContributor", - "StudyContributor" + "StudyContributor", ] diff --git 
a/model/dataset.py b/model/dataset.py index 22eb3f64..cf93aea4 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -21,8 +21,12 @@ def __init__(self, study): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="dataset") - dataset_contributors = db.relationship("DatasetContributor", back_populates="dataset") - dataset_versions = db.relationship("DatasetVersion", back_populates="dataset", lazy="dynamic") + dataset_contributors = db.relationship( + "DatasetContributor", back_populates="dataset" + ) + dataset_versions = db.relationship( + "DatasetVersion", back_populates="dataset", lazy="dynamic" + ) def to_dict(self): last_published = self.last_published() @@ -33,7 +37,7 @@ def to_dict(self): "updated_on": str(datetime.now()), "created_at": str(datetime.now()), "dataset_versions": [i.to_dict() for i in self.dataset_versions], - "latest_version": last_published.id if last_published else None + "latest_version": last_published.id if last_published else None, } def last_published(self): diff --git a/model/dataset/dataset_access.py b/model/dataset/dataset_access.py index 297bb628..1ecf9139 100644 --- a/model/dataset/dataset_access.py +++ b/model/dataset/dataset_access.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetAccess(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -13,9 +14,7 @@ def __init__(self): url_last_checked = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_access" - ) + dataset = db.relationship("Dataset", back_populates="dataset_access") def to_dict(self): return { @@ -24,7 +23,6 @@ def to_dict(self): "description": self.description, "url": self.url, "url_last_checked": self.url_last_checked, - } @staticmethod diff --git a/model/dataset/dataset_consent.py b/model/dataset/dataset_consent.py index 64132b12..99172107 100644 --- 
a/model/dataset/dataset_consent.py +++ b/model/dataset/dataset_consent.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetConsent(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -17,9 +18,7 @@ def __init__(self): details = db.Column(db.Boolean, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_consent" - ) + dataset = db.relationship("Dataset", back_populates="dataset_consent") def to_dict(self): return { @@ -31,9 +30,6 @@ def to_dict(self): "genetic_only": self.genetic_only, "no_methods": self.no_methods, "details": self.details, - - - } @staticmethod @@ -48,6 +44,3 @@ def from_data(data: dict): dataset_consent.details = data["details"] return dataset_consent - - - diff --git a/model/dataset/dataset_contributor_affiliation.py b/model/dataset/dataset_contributor_affiliation.py index 3af4ffe8..bb1e6901 100644 --- a/model/dataset/dataset_contributor_affiliation.py +++ b/model/dataset/dataset_contributor_affiliation.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetContributorAffiliation(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -15,7 +16,6 @@ def __init__(self): ) contributor_id = db.Column(db.String, db.ForeignKey("dataset.id")) - def to_dict(self): return { "id": self.id, diff --git a/model/dataset/dataset_date.py b/model/dataset/dataset_date.py index b885528c..2c267777 100644 --- a/model/dataset/dataset_date.py +++ b/model/dataset/dataset_date.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetDate(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -12,11 +13,7 @@ def __init__(self): data_information = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_date" - ) - - + dataset = db.relationship("Dataset", back_populates="dataset_date") def to_dict(self): return { 
diff --git a/model/dataset/dataset_de_ident_level.py b/model/dataset/dataset_de_ident_level.py index 08fdec94..17836b6b 100644 --- a/model/dataset/dataset_de_ident_level.py +++ b/model/dataset/dataset_de_ident_level.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetDeIdentLevel(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -17,9 +18,7 @@ def __init__(self): details = db.Column(db.Boolean, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_de_ident_level" - ) + dataset = db.relationship("Dataset", back_populates="dataset_de_ident_level") def to_dict(self): return { @@ -31,7 +30,6 @@ def to_dict(self): "nonarr": self.nonarr, "k_anon": self.k_anon, "details": self.details, - } @staticmethod @@ -46,6 +44,3 @@ def from_data(data: dict): dataset_de_ident_level.details = data["details"] return dataset_de_ident_level - - - diff --git a/model/dataset/dataset_description.py b/model/dataset/dataset_description.py index b265c915..14cb8ac4 100644 --- a/model/dataset/dataset_description.py +++ b/model/dataset/dataset_description.py @@ -12,9 +12,7 @@ def __init__(self): description_type = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_contributors" - ) + dataset = db.relationship("Dataset", back_populates="dataset_contributors") def to_dict(self): return { diff --git a/model/dataset/dataset_funder.py b/model/dataset/dataset_funder.py index f907cbd9..0e07c22d 100644 --- a/model/dataset/dataset_funder.py +++ b/model/dataset/dataset_funder.py @@ -17,9 +17,7 @@ def __init__(self): award_title = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_funder" - ) + dataset = db.relationship("Dataset", 
back_populates="dataset_funder") def to_dict(self): return { diff --git a/model/dataset/dataset_identifier.py b/model/dataset/dataset_identifier.py index ae65f53b..2faf321e 100644 --- a/model/dataset/dataset_identifier.py +++ b/model/dataset/dataset_identifier.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetIdentifier(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -12,9 +13,7 @@ def __init__(self): alternate = db.Column(db.Boolean, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_identifier" - ) + dataset = db.relationship("Dataset", back_populates="dataset_identifier") def to_dict(self): return { diff --git a/model/dataset/dataset_managing_organization.py b/model/dataset/dataset_managing_organization.py index 93e91b70..34699daa 100644 --- a/model/dataset/dataset_managing_organization.py +++ b/model/dataset/dataset_managing_organization.py @@ -13,9 +13,7 @@ def __init__(self): ror_id = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_managing_organization" - ) + dataset = db.relationship("Dataset", back_populates="dataset_managing_organization") def to_dict(self): return { @@ -30,6 +28,3 @@ def from_data(data: dict): dataset_managing_organization.name = data["name"] dataset_managing_organization.ror_id = data["ror_id"] return dataset_managing_organization - - - diff --git a/model/dataset/dataset_other.py b/model/dataset/dataset_other.py index 9aea3a6e..a981c1a5 100644 --- a/model/dataset/dataset_other.py +++ b/model/dataset/dataset_other.py @@ -15,9 +15,7 @@ def __init__(self): size = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_other" - ) + dataset = db.relationship("Dataset", 
back_populates="dataset_other") def to_dict(self): return { @@ -33,9 +31,8 @@ def from_data(data: dict): dataset_other = DatasetRights() dataset_other.language = data["language"] dataset_other.managing_organization_name = data["managing_organization_name"] - dataset_other.managing_organization_ror_id = data["managing_organization_ror_id"] + dataset_other.managing_organization_ror_id = data[ + "managing_organization_ror_id" + ] dataset_other.size = data["size"] return dataset_other - - - diff --git a/model/dataset/dataset_record_keys.py b/model/dataset/dataset_record_keys.py index 90f0a0c7..dc2a6de6 100644 --- a/model/dataset/dataset_record_keys.py +++ b/model/dataset/dataset_record_keys.py @@ -12,9 +12,7 @@ def __init__(self): key_details = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_record_keys" - ) + dataset = db.relationship("Dataset", back_populates="dataset_record_keys") def to_dict(self): return { diff --git a/model/dataset/dataset_related_item/dataset_related_item.py b/model/dataset/dataset_related_item/dataset_related_item.py index f52aeb01..b2d1dee1 100644 --- a/model/dataset/dataset_related_item/dataset_related_item.py +++ b/model/dataset/dataset_related_item/dataset_related_item.py @@ -12,9 +12,7 @@ def __init__(self): relation_type = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_related_item" - ) + dataset = db.relationship("Dataset", back_populates="dataset_related_item") def to_dict(self): return { diff --git a/model/dataset/dataset_related_item/dataset_related_item_contributor.py b/model/dataset/dataset_related_item/dataset_related_item_contributor.py index 807f86ef..2a52a92e 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_contributor.py +++ 
b/model/dataset/dataset_related_item/dataset_related_item_contributor.py @@ -13,7 +13,9 @@ def __init__(self): creator = db.Column(db.Boolean, nullable=False) contributor_type = db.Column(db.String, nullable=False) - dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset_related_item.id") + ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_contributor" ) diff --git a/model/dataset/dataset_related_item/dataset_related_item_identifier.py b/model/dataset/dataset_related_item/dataset_related_item_identifier.py index a7c4c492..89e4c2e0 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_identifier.py +++ b/model/dataset/dataset_related_item/dataset_related_item_identifier.py @@ -14,7 +14,9 @@ def __init__(self): scheme_uri = db.Column(db.String, nullable=False) scheme_type = db.Column(db.String, nullable=False) - dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset_related_item.id") + ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_identifier" ) @@ -26,7 +28,7 @@ def to_dict(self): "type": self.type, "metadata_scheme": self.metadata_scheme, "scheme_uri": self.scheme_uri, - "scheme_type": self.scheme_type + "scheme_type": self.scheme_type, } @staticmethod diff --git a/model/dataset/dataset_related_item/dataset_related_item_other.py b/model/dataset/dataset_related_item/dataset_related_item_other.py index d5a3626d..02f0bc5c 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_other.py +++ b/model/dataset/dataset_related_item/dataset_related_item_other.py @@ -18,7 +18,9 @@ def __init__(self): publisher = db.Column(db.String, nullable=False) edition = db.Column(db.String, nullable=False) - dataset_related_item_id = 
db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset_related_item.id") + ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_other" ) @@ -35,7 +37,7 @@ def to_dict(self): "last_page": self.last_page, "publisher": self.publisher, "edition": self.edition, - "scheme_type": self.scheme_type + "scheme_type": self.scheme_type, } @staticmethod diff --git a/model/dataset/dataset_related_item/dataset_related_item_title.py b/model/dataset/dataset_related_item/dataset_related_item_title.py index 4bafe491..32aa37b1 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_title.py +++ b/model/dataset/dataset_related_item/dataset_related_item_title.py @@ -11,7 +11,9 @@ def __init__(self): type = db.Column(db.String, nullable=False) title = db.Column(db.String, nullable=False) - dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) + dataset_related_item_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset_related_item.id") + ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_title" ) diff --git a/model/dataset/dataset_rights.py b/model/dataset/dataset_rights.py index 0ad40ba8..38963cb3 100644 --- a/model/dataset/dataset_rights.py +++ b/model/dataset/dataset_rights.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetRights(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -14,9 +15,7 @@ def __init__(self): identifier_scheme = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_rights" - ) + dataset = db.relationship("Dataset", back_populates="dataset_rights") def to_dict(self): return { @@ -35,6 +34,3 @@ def from_data(data: dict): dataset_rights.identifier = data["identifier"] 
dataset_rights.identifier_scheme = data["identifier_scheme"] return dataset_rights - - - diff --git a/model/dataset/dataset_title.py b/model/dataset/dataset_title.py index 43caca7d..8c23655f 100644 --- a/model/dataset/dataset_title.py +++ b/model/dataset/dataset_title.py @@ -15,7 +15,6 @@ def __init__(self): ) contributor_id = db.Column(db.String, db.ForeignKey("dataset.id")) - def to_dict(self): return { "id": self.id, diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py index 8618e0ff..32beaede 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -19,9 +19,7 @@ def __init__(self): contributor_type = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_contributors" - ) + dataset = db.relationship("Dataset", back_populates="dataset_contributors") def to_dict(self): return { @@ -34,10 +32,8 @@ def to_dict(self): "name_identifier_scheme_uri": self.name_identifier_scheme_uri, "creator": self.creator, "contributor_type": self.contributor_type, - } - @staticmethod def from_data(data: dict): dataset_contributor = DatasetContributor() @@ -47,7 +43,9 @@ def from_data(data: dict): dataset_contributor.name_type = data["name_type"] dataset_contributor.name_identifier = data["name_identifier"] dataset_contributor.name_identifier_scheme = data["name_identifier_scheme"] - dataset_contributor.name_identifier_scheme_uri = data["name_identifier_scheme_uri"] + dataset_contributor.name_identifier_scheme_uri = data[ + "name_identifier_scheme_uri" + ] dataset_contributor.creator = data["creator"] dataset_contributor.contributor_type = data["contributor_type"] return dataset_contributor diff --git a/model/dataset_version.py b/model/dataset_version.py index aa66f70a..882f6637 100644 --- a/model/dataset_version.py +++ b/model/dataset_version.py @@ -60,5 +60,3 @@ def update(self, data): self.published_on = data["published_on"] 
self.participants[:] = data["participants"] self.changelog = data["changelog"] - - diff --git a/model/participant.py b/model/participant.py index 208eada7..18cdb3d4 100644 --- a/model/participant.py +++ b/model/participant.py @@ -34,7 +34,7 @@ def to_dict(self): "last_name": self.last_name, "address": self.address, "age": self.age, - "created_at" : str(datetime.now()), + "created_at": str(datetime.now()), "published_on": str(datetime.now()), } diff --git a/model/study.py b/model/study.py index 247b5969..64047545 100644 --- a/model/study.py +++ b/model/study.py @@ -25,7 +25,9 @@ def __init__(self): dataset = db.relationship("Dataset", back_populates="study") study_contributors = db.relationship("StudyContributor", back_populates="study") participants = db.relationship("Participant", back_populates="study") - invited_contributors = db.relationship("StudyInvitedContributor", back_populates="study") + invited_contributors = db.relationship( + "StudyInvitedContributor", back_populates="study" + ) def to_dict(self): """Converts the study to a dictionary""" diff --git a/model/user.py b/model/user.py index 773d290f..726be699 100644 --- a/model/user.py +++ b/model/user.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime +from datetime import datetime from .db import db @@ -18,6 +18,7 @@ def __init__(self): created_at = db.Column(db.DateTime, nullable=False) institution = db.Column(db.String, nullable=False) study_contributors = db.relationship("StudyContributor", back_populates="user") + def to_dict(self): return { "id": self.id, From 22a0e257eff3735fe352c781638aef637da14fe8 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 17 Aug 2023 12:26:30 -0700 Subject: [PATCH 022/505] feat: added reverse connections to dataset models --- model/__init__.py | 86 ++++++++++++++++++- model/dataset.py | 18 +++- model/dataset_contributor.py | 9 +- .../__init__.py | 0 .../dataset_access.py | 1 + .../dataset_consent.py | 12 +-- .../dataset_contributor_affiliation.py | 7 +- 
.../dataset_date.py | 1 - .../dataset_de_ident_level.py | 14 +-- .../dataset_description.py | 17 ++-- .../dataset_funder.py | 0 .../dataset_identifier.py | 3 +- .../dataset_managing_organization.py | 0 .../dataset_other.py | 16 ++-- model/dataset_metadata/dataset_readme.py | 30 +++++++ .../dataset_record_keys.py | 4 +- .../dataset_related_item.py | 12 +++ .../dataset_related_item_contributor.py | 2 +- .../dataset_related_item_identifier.py | 2 +- .../dataset_related_item_other.py | 4 +- .../dataset_related_item_title.py | 1 - .../dataset_rights.py | 1 + model/dataset_metadata/dataset_subject.py | 45 ++++++++++ .../dataset_title.py | 18 ++-- model/dataset_version.py | 2 +- 25 files changed, 245 insertions(+), 60 deletions(-) rename model/{dataset/dataset_related_item => dataset_metadata}/__init__.py (100%) rename model/{dataset => dataset_metadata}/dataset_access.py (99%) rename model/{dataset => dataset_metadata}/dataset_consent.py (79%) rename model/{dataset => dataset_metadata}/dataset_contributor_affiliation.py (87%) rename model/{dataset => dataset_metadata}/dataset_date.py (95%) rename model/{dataset => dataset_metadata}/dataset_de_ident_level.py (77%) rename model/{dataset => dataset_metadata}/dataset_description.py (56%) rename model/{dataset => dataset_metadata}/dataset_funder.py (100%) rename model/{dataset => dataset_metadata}/dataset_identifier.py (94%) rename model/{dataset => dataset_metadata}/dataset_managing_organization.py (100%) rename model/{dataset => dataset_metadata}/dataset_other.py (68%) create mode 100644 model/dataset_metadata/dataset_readme.py rename model/{dataset => dataset_metadata}/dataset_record_keys.py (90%) rename model/{dataset/dataset_related_item => dataset_metadata}/dataset_related_item.py (62%) rename model/{dataset/dataset_related_item => dataset_metadata}/dataset_related_item_contributor.py (96%) rename model/{dataset/dataset_related_item => dataset_metadata}/dataset_related_item_identifier.py (96%) rename 
model/{dataset/dataset_related_item => dataset_metadata}/dataset_related_item_other.py (95%) rename model/{dataset/dataset_related_item => dataset_metadata}/dataset_related_item_title.py (99%) rename model/{dataset => dataset_metadata}/dataset_rights.py (99%) create mode 100644 model/dataset_metadata/dataset_subject.py rename model/{dataset => dataset_metadata}/dataset_title.py (50%) diff --git a/model/__init__.py b/model/__init__.py index b254806d..6a92aa1a 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,16 +1,56 @@ -from .dataset import Dataset from .dataset_version import DatasetVersion from .dataset_versions import DatasetVersions from .db import db from .participant import Participant from .study import Study from .user import User +from .dataset import Dataset from .dataset_contributor import DatasetContributor - from .invited_study_contributor import StudyInvitedContributor from .study_contributor import StudyContributor +from .dataset_metadata.dataset_access import DatasetAccess +from .dataset_metadata.dataset_consent import DatasetConsent +from .dataset_metadata.dataset_contributor_affiliation import DatasetContributorAffiliation +from .dataset_metadata.dataset_date import DatasetDate +from .dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel +from .dataset_metadata.dataset_description import DatasetDescription +from .dataset_metadata.dataset_funder import DatasetFunder +from .dataset_metadata.dataset_identifier import DatasetIdentifier +from .dataset_metadata.dataset_managing_organization import DatasetManagingOrganization +from .dataset_metadata.dataset_other import DatasetOther +from .dataset_metadata.dataset_readme import DatasetReadme +from .dataset_metadata.dataset_record_keys import DatasetRecordKeys +from .dataset_metadata.dataset_rights import DatasetRights +from .dataset_metadata.dataset_title import DatasetTitle +from .dataset_metadata.dataset_subject import DatasetSubject + +from 
model.dataset_metadata.dataset_related_item_contributor import DatasetRelatedItemContributor +from model.dataset_metadata.dataset_related_item_identifier import DatasetRelatedItemIdentifier +from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther +from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle +from model.dataset_metadata.dataset_related_item import DatasetRelatedItem + +from .study_metadata.study_arm import StudyArm +from .study_metadata.study_available_ipd import StudyAvailableIPD +from .study_metadata.study_contact import StudyContact +from .study_metadata.study_description import StudyDescription +from .study_metadata.study_design import StudyDesign +from .study_metadata.study_eligibility import StudyEligibility +from .study_metadata.study_identification import StudyIdentification +from .study_metadata.study_intervention import StudyIntervention +from .study_metadata.study_ipdsharing import StudyIpdsharing +from .study_metadata.study_link import StudyLink +from .study_metadata.study_location import StudyLocation +from .study_metadata.study_other import StudyOther +from .study_metadata.study_overall_official import StudyOverall +from .study_metadata.study_reference import StudyReference +from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators +from .study_metadata.study_status import StudyStatus + + + __all__ = [ "Study", "Dataset", @@ -21,5 +61,45 @@ "User", "DatasetContributor", "StudyInvitedContributor", - "StudyContributor" + "StudyContributor", + + "DatasetOther", + "DatasetAccess", + "DatasetConsent", + "DatasetContributorAffiliation", + "DatasetDate", + "DatasetDeIdentLevel", + "DatasetContributorAffiliation", + "DatasetFunder", + "DatasetIdentifier", + "DatasetManagingOrganization", + "DatasetRights", + "DatasetReadme", + "DatasetRecordKeys", + "DatasetTitle", + "DatasetSubject", + "DatasetRelatedItemContributor", + "DatasetRelatedItemIdentifier", + 
"DatasetRelatedItemOther", + "DatasetRelatedItemTitle", + "DatasetRelatedItem", + "DatasetDescription", + "StudyArm", + "StudyAvailableIPD", + "StudyContact", + "StudyDescription", + "StudyDesign", + "StudyEligibility", + "StudyIdentification", + "StudyIntervention", + "StudyIpdsharing", + "StudyLink", + "StudyLocation", + "StudyOther", + "StudyOverall", + "StudyReference", + "StudySponsorsCollaborators", + "StudyStatus", + + ] diff --git a/model/dataset.py b/model/dataset.py index 22eb3f64..e328709a 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -23,10 +23,26 @@ def __init__(self, study): dataset_contributors = db.relationship("DatasetContributor", back_populates="dataset") dataset_versions = db.relationship("DatasetVersion", back_populates="dataset", lazy="dynamic") + dataset_access = db.relationship("DatasetAccess", back_populates="dataset") + dataset_consent = db.relationship("DatasetConsent", back_populates="dataset") + dataset_date = db.relationship("DatasetDate", back_populates="dataset") + dataset_de_ident_level = db.relationship("DatasetDeIdentLevel", back_populates="dataset") + dataset_description = db.relationship("DatasetDescription", back_populates="dataset") + + dataset_funder = db.relationship("DatasetFunder", back_populates="dataset") + dataset_identifier = db.relationship("DatasetIdentifier", back_populates="dataset") + dataset_managing_organization = db.relationship("DatasetManagingOrganization", back_populates="dataset") + dataset_other = db.relationship("DatasetOther", back_populates="dataset") + dataset_readme = db.relationship("DatasetReadme", back_populates="dataset") + dataset_record_keys = db.relationship("DatasetRecordKeys", back_populates="dataset") + dataset_related_item = db.relationship("DatasetRelatedItem", back_populates="dataset") + dataset_rights = db.relationship("DatasetRights", back_populates="dataset") + dataset_subject = db.relationship("DatasetSubject", back_populates="dataset") + dataset_title = 
db.relationship("DatasetTitle", back_populates="dataset") def to_dict(self): last_published = self.last_published() - last_modified = self.last_modified() + # last_modified = self.last_modified() return { "id": self.id, diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py index 8618e0ff..6787c223 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -15,13 +15,13 @@ def __init__(self): name_identifier = db.Column(db.String, nullable=False) name_identifier_scheme = db.Column(db.String, nullable=False) name_identifier_scheme_uri = db.Column(db.String, nullable=False) - creator = db.Column(db.Boolean, nullable=False) + creator = db.Column(db.BOOLEAN, nullable=False) contributor_type = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_contributors" - ) + dataset = db.relationship("Dataset", back_populates="dataset_contributors") + dataset_contributor_affiliation = db.relationship( + "DatasetContributorAffiliation", back_populates="dataset_contributors") def to_dict(self): return { @@ -41,7 +41,6 @@ def to_dict(self): @staticmethod def from_data(data: dict): dataset_contributor = DatasetContributor() - # dataset_contributor.id = data["id"] dataset_contributor.first_name = data["first_name"] dataset_contributor.last_name = data["last_name"] dataset_contributor.name_type = data["name_type"] diff --git a/model/dataset/dataset_related_item/__init__.py b/model/dataset_metadata/__init__.py similarity index 100% rename from model/dataset/dataset_related_item/__init__.py rename to model/dataset_metadata/__init__.py diff --git a/model/dataset/dataset_access.py b/model/dataset_metadata/dataset_access.py similarity index 99% rename from model/dataset/dataset_access.py rename to model/dataset_metadata/dataset_access.py index 297bb628..b1f142d7 100644 --- a/model/dataset/dataset_access.py +++ 
b/model/dataset_metadata/dataset_access.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetAccess(db.Model): def __init__(self): self.id = str(uuid.uuid4()) diff --git a/model/dataset/dataset_consent.py b/model/dataset_metadata/dataset_consent.py similarity index 79% rename from model/dataset/dataset_consent.py rename to model/dataset_metadata/dataset_consent.py index 64132b12..044345b2 100644 --- a/model/dataset/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -9,12 +9,12 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) - noncommercial = db.Column(db.Boolean, nullable=False) - geog_restrict = db.Column(db.Boolean, nullable=False) - research_type = db.Column(db.Boolean, nullable=False) - genetic_only = db.Column(db.Boolean, nullable=False) - no_methods = db.Column(db.Boolean, nullable=False) - details = db.Column(db.Boolean, nullable=False) + noncommercial = db.Column(db.BOOLEAN, nullable=False) + geog_restrict = db.Column(db.BOOLEAN, nullable=False) + research_type = db.Column(db.BOOLEAN, nullable=False) + genetic_only = db.Column(db.BOOLEAN, nullable=False) + no_methods = db.Column(db.BOOLEAN, nullable=False) + details = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship( diff --git a/model/dataset/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py similarity index 87% rename from model/dataset/dataset_contributor_affiliation.py rename to model/dataset_metadata/dataset_contributor_affiliation.py index 3af4ffe8..0b88660c 100644 --- a/model/dataset/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetContributorAffiliation(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -10,11 +11,10 @@ def __init__(self): identifier = 
db.Column(db.String, nullable=False) identifier_scheme = db.Column(db.String, nullable=False) identifier_scheme_uri = db.Column(db.String, nullable=False) - dataset_contributor = db.relationship( + dataset_contributors = db.relationship( "DatasetContributor", back_populates="dataset_contributor_affiliation" ) - contributor_id = db.Column(db.String, db.ForeignKey("dataset.id")) - + dataset_contributor_id = db.Column(db.String, db.ForeignKey("dataset_contributor.id")) def to_dict(self): return { @@ -27,7 +27,6 @@ def to_dict(self): @staticmethod def from_data(data: dict): dataset_contributor = DatasetContributorAffiliation() - # dataset_contributor.id = data["id"] dataset_contributor.name_identifier = data["identifier"] dataset_contributor.name_identifier_scheme = data["identifier_scheme"] dataset_contributor.name_identifier_scheme_uri = data["identifier_scheme_uri"] diff --git a/model/dataset/dataset_date.py b/model/dataset_metadata/dataset_date.py similarity index 95% rename from model/dataset/dataset_date.py rename to model/dataset_metadata/dataset_date.py index b885528c..d1ef2b6f 100644 --- a/model/dataset/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -29,7 +29,6 @@ def to_dict(self): @staticmethod def from_data(data: dict): dataset_date = DatasetDate() - # dataset_contributor.id = data["id"] dataset_date.date = data["date"] dataset_date.date_type = data["date_type"] dataset_date.data_information = data["data_information"] diff --git a/model/dataset/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py similarity index 77% rename from model/dataset/dataset_de_ident_level.py rename to model/dataset_metadata/dataset_de_ident_level.py index 08fdec94..a13a1a92 100644 --- a/model/dataset/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -5,16 +5,16 @@ class DatasetDeIdentLevel(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - __tablename__ = "datasedataset_de_ident_level" + 
__tablename__ = "dataset_de_ident_level" id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) - direct = db.Column(db.Boolean, nullable=False) - hipaa = db.Column(db.Boolean, nullable=False) - dates = db.Column(db.Boolean, nullable=False) - nonarr = db.Column(db.Boolean, nullable=False) - k_anon = db.Column(db.Boolean, nullable=False) - details = db.Column(db.Boolean, nullable=False) + direct = db.Column(db.BOOLEAN, nullable=False) + hipaa = db.Column(db.BOOLEAN, nullable=False) + dates = db.Column(db.BOOLEAN, nullable=False) + nonarr = db.Column(db.BOOLEAN, nullable=False) + k_anon = db.Column(db.BOOLEAN, nullable=False) + details = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship( diff --git a/model/dataset/dataset_description.py b/model/dataset_metadata/dataset_description.py similarity index 56% rename from model/dataset/dataset_description.py rename to model/dataset_metadata/dataset_description.py index b265c915..974da190 100644 --- a/model/dataset/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -2,18 +2,18 @@ from ..db import db -class DatasetContributorAffiliation(db.Model): +class DatasetDescription(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - __tablename__ = "dataset_contributor_affiliation" + __tablename__ = "dataset_description" id = db.Column(db.CHAR(36), primary_key=True) description = db.Column(db.String, nullable=False) description_type = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship( - "Dataset", back_populates="dataset_contributors" + "Dataset", back_populates="dataset_description" ) def to_dict(self): @@ -25,9 +25,8 @@ def to_dict(self): @staticmethod def from_data(data: dict): - dataset_contributor = DatasetContributorAffiliation() - # dataset_contributor.id = data["id"] - dataset_contributor.description = 
data["description"] - dataset_contributor.description_type = data["description_type"] - dataset_contributor.name_identifier_scheme_uri = data["identifier_scheme_uri"] - return dataset_contributor + dataset_description = DatasetDescription() + dataset_description.description = data["description"] + dataset_description.description_type = data["description_type"] + dataset_description.name_identifier_scheme_uri = data["identifier_scheme_uri"] + return dataset_description diff --git a/model/dataset/dataset_funder.py b/model/dataset_metadata/dataset_funder.py similarity index 100% rename from model/dataset/dataset_funder.py rename to model/dataset_metadata/dataset_funder.py diff --git a/model/dataset/dataset_identifier.py b/model/dataset_metadata/dataset_identifier.py similarity index 94% rename from model/dataset/dataset_identifier.py rename to model/dataset_metadata/dataset_identifier.py index ae65f53b..2b90f5f5 100644 --- a/model/dataset/dataset_identifier.py +++ b/model/dataset_metadata/dataset_identifier.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetIdentifier(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -9,7 +10,7 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) identifier_type = db.Column(db.String, nullable=False) - alternate = db.Column(db.Boolean, nullable=False) + alternate = db.Column(db.BOOLEAN, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship( diff --git a/model/dataset/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py similarity index 100% rename from model/dataset/dataset_managing_organization.py rename to model/dataset_metadata/dataset_managing_organization.py diff --git a/model/dataset/dataset_other.py b/model/dataset_metadata/dataset_other.py similarity index 68% rename from model/dataset/dataset_other.py rename to 
model/dataset_metadata/dataset_other.py index 9aea3a6e..ea3f0f61 100644 --- a/model/dataset/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -1,8 +1,9 @@ import uuid from ..db import db +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY - -class DatasetRights(db.Model): +class DatasetOther(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -12,11 +13,12 @@ def __init__(self): language = db.Column(db.String, nullable=False) managing_organization_name = db.Column(db.String, nullable=False) managing_organization_ror_id = db.Column(db.String, nullable=False) - size = db.Column(db.String, nullable=False) + size = db.Column(ARRAY(String), nullable=False) + standards_followed = db.Column(db.String, nullable=False) + acknowledgement = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_other" + dataset = db.relationship("Dataset", back_populates="dataset_other" ) def to_dict(self): @@ -25,12 +27,14 @@ def to_dict(self): "language": self.language, "managing_organization_name": self.managing_organization_name, "managing_organization_ror_id": self.managing_organization_ror_id, + "standards_followed": self.managing_organization_ror_id, + "acknowledgement": self.size, "size": self.size, } @staticmethod def from_data(data: dict): - dataset_other = DatasetRights() + dataset_other = DatasetOther() dataset_other.language = data["language"] dataset_other.managing_organization_name = data["managing_organization_name"] dataset_other.managing_organization_ror_id = data["managing_organization_ror_id"] diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py new file mode 100644 index 00000000..65d1dede --- /dev/null +++ b/model/dataset_metadata/dataset_readme.py @@ -0,0 +1,30 @@ +import uuid +from ..db import db + +class DatasetReadme(db.Model): + def __init__(self): + self.id 
= str(uuid.uuid4()) + + __tablename__ = "dataset_readme" + id = db.Column(db.CHAR(36), primary_key=True) + content = db.Column(db.BOOLEAN, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_readme" + ) + + def to_dict(self): + return { + "id": self.id, + "content": self.noncommercial, + } + + @staticmethod + def from_data(data: dict): + dataset_readme = DatasetReadme() + dataset_readme.content = data["content"] + return dataset_readme + + + diff --git a/model/dataset/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py similarity index 90% rename from model/dataset/dataset_record_keys.py rename to model/dataset_metadata/dataset_record_keys.py index 90f0a0c7..fa5b5aea 100644 --- a/model/dataset/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -2,7 +2,7 @@ from ..db import db -class DatasetRecordsKeys(db.Model): +class DatasetRecordKeys(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -25,7 +25,7 @@ def to_dict(self): @staticmethod def from_data(data: dict): - dataset_record_keys = DatasetRecordsKeys() + dataset_record_keys = DatasetRecordKeys() dataset_record_keys.key_type = data["key_type"] dataset_record_keys.key_details = data["key_details"] diff --git a/model/dataset/dataset_related_item/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py similarity index 62% rename from model/dataset/dataset_related_item/dataset_related_item.py rename to model/dataset_metadata/dataset_related_item.py index f52aeb01..8d36fb25 100644 --- a/model/dataset/dataset_related_item/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -15,6 +15,18 @@ def __init__(self): dataset = db.relationship( "Dataset", back_populates="dataset_related_item" ) + dataset_related_item_contributor = db.relationship( + "DatasetRelatedItemContributor", back_populates="dataset_related_item" + ) + 
dataset_related_item_identifier = db.relationship( + "DatasetRelatedItemIdentifier", back_populates="dataset_related_item" + ) + dataset_related_item_other = db.relationship( + "DatasetRelatedItemOther", back_populates="dataset_related_item" + ) + dataset_related_item_title = db.relationship( + "DatasetRelatedItemTitle", back_populates="dataset_related_item" + ) def to_dict(self): return { diff --git a/model/dataset/dataset_related_item/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py similarity index 96% rename from model/dataset/dataset_related_item/dataset_related_item_contributor.py rename to model/dataset_metadata/dataset_related_item_contributor.py index 807f86ef..b6ba8915 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -10,7 +10,7 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) name = db.Column(db.String, nullable=False) name_type = db.Column(db.String, nullable=False) - creator = db.Column(db.Boolean, nullable=False) + creator = db.Column(db.BOOLEAN, nullable=False) contributor_type = db.Column(db.String, nullable=False) dataset_related_item_id = db.Column(db.CHAR(36), db.ForeignKey("dataset_related_item.id")) diff --git a/model/dataset/dataset_related_item/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py similarity index 96% rename from model/dataset/dataset_related_item/dataset_related_item_identifier.py rename to model/dataset_metadata/dataset_related_item_identifier.py index a7c4c492..264313c9 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -10,7 +10,7 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) type = db.Column(db.String, nullable=False) - metadata_scheme = 
db.Column(db.Boolean, nullable=False) + metadata_scheme = db.Column(db.String, nullable=False) scheme_uri = db.Column(db.String, nullable=False) scheme_type = db.Column(db.String, nullable=False) diff --git a/model/dataset/dataset_related_item/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py similarity index 95% rename from model/dataset/dataset_related_item/dataset_related_item_other.py rename to model/dataset_metadata/dataset_related_item_other.py index d5a3626d..85288c91 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_other.py +++ b/model/dataset_metadata/dataset_related_item_other.py @@ -10,11 +10,11 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) publication_year = db.Column(db.String, nullable=False) volume = db.Column(db.String, nullable=False) - issue = db.Column(db.Boolean, nullable=False) + issue = db.Column(db.String, nullable=False) number_value = db.Column(db.String, nullable=False) number_type = db.Column(db.String, nullable=False) first_page = db.Column(db.String, nullable=False) - last_page = db.Column(db.Boolean, nullable=False) + last_page = db.Column(db.BOOLEAN, nullable=False) publisher = db.Column(db.String, nullable=False) edition = db.Column(db.String, nullable=False) diff --git a/model/dataset/dataset_related_item/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py similarity index 99% rename from model/dataset/dataset_related_item/dataset_related_item_title.py rename to model/dataset_metadata/dataset_related_item_title.py index 4bafe491..1717b4b7 100644 --- a/model/dataset/dataset_related_item/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -1,7 +1,6 @@ import uuid from ..db import db - class DatasetRelatedItemTitle(db.Model): def __init__(self): self.id = str(uuid.uuid4()) diff --git a/model/dataset/dataset_rights.py b/model/dataset_metadata/dataset_rights.py similarity index 99% rename 
from model/dataset/dataset_rights.py rename to model/dataset_metadata/dataset_rights.py index 0ad40ba8..2e603cca 100644 --- a/model/dataset/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetRights(db.Model): def __init__(self): self.id = str(uuid.uuid4()) diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py new file mode 100644 index 00000000..4b92e1f6 --- /dev/null +++ b/model/dataset_metadata/dataset_subject.py @@ -0,0 +1,45 @@ +import uuid +from ..db import db + + +class DatasetSubject(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "dataset_subject" + id = db.Column(db.CHAR(36), primary_key=True) + + subject = db.Column(db.String, nullable=False) + scheme = db.Column(db.String, nullable=False) + scheme_uri = db.Column(db.String, nullable=False) + value_uri = db.Column(db.String, nullable=False) + classification_code = db.Column(db.String, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset = db.relationship( + "Dataset", back_populates="dataset_subject" + ) + + def to_dict(self): + return { + "id": self.id, + "subject": self.subject, + "scheme": self.scheme, + "scheme_uri": self.scheme_uri, + "value_uri": self.value_uri, + "classification_code": self.classification_code, + + } + + @staticmethod + def from_data(data: dict): + dataset_subject = DatasetRights() + dataset_subject.subject = data["subject"] + dataset_subject.scheme = data["scheme"] + dataset_subject.scheme_uri = data["scheme_uri"] + dataset_subject.value_uri = data["value_uri"] + dataset_subject.classification_code = data["classification_code"] + return dataset_subject + + + diff --git a/model/dataset/dataset_title.py b/model/dataset_metadata/dataset_title.py similarity index 50% rename from model/dataset/dataset_title.py rename to model/dataset_metadata/dataset_title.py index 43caca7d..30b034be 100644 
--- a/model/dataset/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -6,14 +6,14 @@ class DatasetTitle(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - __tablename__ = "dataset_contributor_affiliation" + __tablename__ = "dataset_title" id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) type = db.Column(db.String, nullable=False) - dataset_contributor = db.relationship( - "DatasetContributor", back_populates="dataset_title" + dataset = db.relationship( + "Dataset", back_populates="dataset_title" ) - contributor_id = db.Column(db.String, db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.String, db.ForeignKey("dataset.id")) def to_dict(self): @@ -25,8 +25,8 @@ def to_dict(self): @staticmethod def from_data(data: dict): - dataset_contributor = DatasetTitle() - # dataset_contributor.id = data["id"] - dataset_contributor.title = data["title"] - dataset_contributor.type = data["type"] - return dataset_contributor + dataset_title = DatasetTitle() + + dataset_title.title = data["title"] + dataset_title.type = data["type"] + return dataset_title diff --git a/model/dataset_version.py b/model/dataset_version.py index aa66f70a..7c4c54f2 100644 --- a/model/dataset_version.py +++ b/model/dataset_version.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime -from model import Dataset +from model.dataset import Dataset from .db import db From e029d9c33536b184e5a7f47a6d59bccd0da2b783 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 17 Aug 2023 13:39:44 -0700 Subject: [PATCH 023/505] feat: added sql files --- app.py | 13 ++- init/all_tables.sql | 246 +++++++++++++++++++++++++++++++++++++++ init/specific_tables.sql | 72 ++++++++++++ 3 files changed, 329 insertions(+), 2 deletions(-) create mode 100644 init/all_tables.sql create mode 100644 init/specific_tables.sql diff --git a/app.py b/app.py index a4f402e6..ef518265 100644 --- a/app.py +++ b/app.py @@ -46,8 +46,17 @@ def create_app(): 
@app.cli.command("create-schema") def create_schema(): - """Create the database schema.""" - model.db.create_all() + engine = model.db.session.get_bind() + with engine.begin() as conn: + """Create the database schema.""" + model.db.create_all() + + @app.cli.command("destroy-schema") + def destroy_schema(): + engine = model.db.session.get_bind() + with engine.begin() as conn: + """Create the database schema.""" + model.db.drop_all() # # @api.route("/") diff --git a/init/all_tables.sql b/init/all_tables.sql new file mode 100644 index 00000000..6afa9626 --- /dev/null +++ b/init/all_tables.sql @@ -0,0 +1,246 @@ +-- -------------------------------------------------------- +-- Host: 7hg.h.filess.io +-- Server version: PostgreSQL 14.4 on x86_64-pc-linux-musl, compiled by gcc (Alpine 11.2.1_git20220219) 11.2.1 20220219, 64-bit +-- Server OS: +-- HeidiSQL Version: 12.3.0.6589 +-- -------------------------------------------------------- + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET NAMES */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- Dumping data for table public.dataset: -1 rows +/*!40000 ALTER TABLE "dataset" DISABLE KEYS */; +INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', 
'00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'); +/*!40000 ALTER TABLE "dataset" ENABLE KEYS */; + +-- Dumping data for table public.dataset_access: -1 rows +/*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; + +-- Dumping data for table public.dataset_consent: -1 rows +/*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; + +-- Dumping data for table public.dataset_contributor: -1 rows +/*!40000 ALTER TABLE "dataset_contributor" DISABLE KEYS */; +INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_contributor" ENABLE KEYS */; + +-- Dumping data for table public.dataset_contributor_affiliation: -1 rows +/*!40000 ALTER TABLE "dataset_contributor_affiliation" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_contributor_affiliation" ENABLE KEYS */; + +-- Dumping data for table public.dataset_date: -1 rows +/*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_date" ENABLE KEYS */; + +-- Dumping data for table public.dataset_description: -1 rows +/*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; + +-- Dumping data for table public.dataset_de_ident_level: -1 rows +/*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; + +-- Dumping data for table public.dataset_funder: -1 rows +/*!40000 ALTER TABLE "dataset_funder" 
DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; + +-- Dumping data for table public.dataset_identifier: -1 rows +/*!40000 ALTER TABLE "dataset_identifier" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_identifier" ENABLE KEYS */; + +-- Dumping data for table public.dataset_managing_organization: -1 rows +/*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; + +-- Dumping data for table public.dataset_other: -1 rows +/*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; + +-- Dumping data for table public.dataset_readme: -1 rows +/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; + +-- Dumping data for table public.dataset_record_keys: -1 rows +/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item: -1 rows +/*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_contributor: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_identifier: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_identifier" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_identifier" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_other: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_other" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_other" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_title: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_title" DISABLE KEYS */; +/*!40000 ALTER 
TABLE "dataset_related_item_title" ENABLE KEYS */; + +-- Dumping data for table public.dataset_rights: -1 rows +/*!40000 ALTER TABLE "dataset_rights" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; + +-- Dumping data for table public.dataset_subject: -1 rows +/*!40000 ALTER TABLE "dataset_subject" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; + +-- Dumping data for table public.dataset_title: -1 rows +/*!40000 ALTER TABLE "dataset_title" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; + +-- Dumping data for table public.dataset_version: -1 rows +/*!40000 ALTER TABLE "dataset_version" DISABLE KEYS */; +INSERT INTO "dataset_version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'AIREADI1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'AIREADI4', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000003', 'AIREADI3', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); +/*!40000 ALTER TABLE "dataset_version" ENABLE KEYS */; + +-- Dumping data for table public.invited_study_contributor: -1 rows +/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; +INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES + ('aydan.gasimova@gmail.com', 'owner', '2023-08-13 
16:34:16', '00000000-0000-0000-0000-000000000001'), + ('bhavesh.patel@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000003'), + ('sanjay.soundarajan@@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000004'); +/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; + +-- Dumping data for table public.participant: -1 rows +/*!40000 ALTER TABLE "participant" DISABLE KEYS */; +INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova', '1221d kibler drive', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000004'); +/*!40000 ALTER TABLE "participant" ENABLE KEYS */; + +-- Dumping data for table public.study: -1 rows +/*!40000 ALTER TABLE "study" DISABLE KEYS */; +INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), + ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://loremflickr.com/640/480?lock=342651989655552', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), + ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), + 
('00000000-0000-0000-0000-000000000008', 'study 8', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-03 12:33:10', '2023-01-03 12:33:11'), + ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://loremflickr.com/640/480?lock=342651989655552', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), + ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://loremflickr.com/640/480?lock=342651989655552', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), + ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), + ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://loremflickr.com/640/480?lock=342651989655552', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); +/*!40000 ALTER TABLE "study" ENABLE KEYS */; + +-- Dumping data for table public.study_arm: -1 rows +/*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; + +-- Dumping data for table public.study_available: -1 rows +/*!40000 ALTER TABLE "study_available" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_available" ENABLE KEYS */; + +-- Dumping data for table public.study_contact: -1 rows +/*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; + +-- Dumping data for table public.study_contributor: -1 rows +/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; +INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES + ('editor', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000004'), + ('editor', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000006'); +/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; + +-- Dumping data for table 
public.study_description: -1 rows +/*!40000 ALTER TABLE "study_description" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_description" ENABLE KEYS */; + +-- Dumping data for table public.study_design: -1 rows +/*!40000 ALTER TABLE "study_design" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_design" ENABLE KEYS */; + +-- Dumping data for table public.study_eligibility: 0 rows +/*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; + +-- Dumping data for table public.study_identification: -1 rows +/*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; + +-- Dumping data for table public.study_intervention: -1 rows +/*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; + +-- Dumping data for table public.study_ipdsharing: -1 rows +/*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; + +-- Dumping data for table public.study_link: -1 rows +/*!40000 ALTER TABLE "study_link" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_link" ENABLE KEYS */; + +-- Dumping data for table public.study_location: -1 rows +/*!40000 ALTER TABLE "study_location" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_location" ENABLE KEYS */; + +-- Dumping data for table public.study_other: -1 rows +/*!40000 ALTER TABLE "study_other" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_other" ENABLE KEYS */; + +-- Dumping data for table public.study_overall_official: -1 rows +/*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; + +-- Dumping data for table public.study_reference: -1 rows +/*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; + +-- Dumping data for table public.study_sponsors_collaborators: -1 rows +/*!40000 ALTER 
TABLE "study_sponsors_collaborators" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; + +-- Dumping data for table public.study_status: -1 rows +/*!40000 ALTER TABLE "study_status" DISABLE KEYS */; +/*!40000 ALTER TABLE "study_status" ENABLE KEYS */; + +-- Dumping data for table public.user: -1 rows +/*!40000 ALTER TABLE "user" DISABLE KEYS */; +INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES + ('00000000-0000-0000-0000-000000000001', 'bhavesh.patel@gmail.com', 'bhavesh', 'Bhavesh', 'Patel', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000002', 'sanjay.soundarajan@gmail.com', 'sanjay', 'sanjay', 'soundarajan', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000003', 'billy.sanders@gmail.com', 'billy', 'billy', 'sanders', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000004', 'james.lilly@gmail.com', 'james', 'james', 'lilly', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'); +/*!40000 ALTER TABLE "user" ENABLE KEYS */; + +-- Dumping data for table public.version_participants: -1 rows +/*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; +INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; + +/*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; +/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; +/*!40014 SET 
FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40111 SET SQL_NOTES=IFNULL(@OLD_SQL_NOTES, 1) */; diff --git a/init/specific_tables.sql b/init/specific_tables.sql new file mode 100644 index 00000000..a510f187 --- /dev/null +++ b/init/specific_tables.sql @@ -0,0 +1,72 @@ +BEGIN; + +INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), + ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://loremflickr.com/640/480?lock=342651989655552', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), + ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), + ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-03 12:33:10', '2023-01-03 12:33:11'), + ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://loremflickr.com/640/480?lock=342651989655552', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), + ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://loremflickr.com/640/480?lock=342651989655552', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), + ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), + ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://loremflickr.com/640/480?lock=342651989655552', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); +/*!40000 ALTER TABLE "study" ENABLE KEYS */; + + +INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES + ('00000000-0000-0000-0000-000000000001', 'bhavesh.patel@gmail.com', 'bhavesh', 'Bhavesh', 'Patel', 
'1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000002', 'sanjay.soundarajan@gmail.com', 'sanjay', 'sanjay', 'soundarajan', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000003', 'billy.sanders@gmail.com', 'billy', 'billy', 'sanders', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000004', 'james.lilly@gmail.com', 'james', 'james', 'lilly', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'); +/*!40000 ALTER TABLE "user" ENABLE KEYS */; + + +INSERT INTO "study_contributor" ("study_id", "user_id", "permission") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001', 'editor'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002', 'owner'), + ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000003', 'owner'), + ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000004', 'editor'); + + +INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'); +/*!40000 ALTER TABLE "dataset" ENABLE KEYS */; + +INSERT INTO "dataset_version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", 
"dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'AIREADI1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'AIREADI4', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000003', 'AIREADI3', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); +/*!40000 ALTER TABLE "dataset_version" ENABLE KEYS */; + +INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); + + +INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES + ('aydan.gasimova@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('bhavesh.patel@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000003'), + ('sanjay.soundarajan@@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000004'); + +INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova', '1221d kibler drive', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', 
'00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000004'); + +INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); + + +COMMIT; + + + + + From 67a4310206b630ba0142ba9464422551ed874852 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 17 Aug 2023 13:44:19 -0700 Subject: [PATCH 024/505] fix: study models --- model/__init__.py | 8 +-- model/study.py | 19 +++++++ model/{study => study_metadata}/study_arm.py | 2 +- .../study_available_ipd.py} | 6 +-- .../study_contact.py | 2 +- .../study_description.py | 0 .../{study => study_metadata}/study_design.py | 12 +---- .../study_eligibility.py | 7 ++- model/study_metadata/study_identification.py | 53 +++++++++++++++++++ .../study_intervention.py | 0 .../study_ipdsharing.py | 2 - model/{study => study_metadata}/study_link.py | 0 .../study_location.py | 0 .../{study => study_metadata}/study_other.py | 2 +- .../study_overall_official.py | 12 ++--- .../study_reference.py | 2 +- .../study_sponsors_collaborators.py | 0 .../{study => study_metadata}/study_status.py | 0 18 files changed, 95 insertions(+), 32 deletions(-) rename model/{study => study_metadata}/study_arm.py 
(97%) rename model/{study/study_avaliable_ipd.py => study_metadata/study_available_ipd.py} (93%) rename model/{study => study_metadata}/study_contact.py (97%) rename model/{study => study_metadata}/study_description.py (100%) rename model/{study => study_metadata}/study_design.py (96%) rename model/{study => study_metadata}/study_eligibility.py (89%) create mode 100644 model/study_metadata/study_identification.py rename model/{study => study_metadata}/study_intervention.py (100%) rename model/{study => study_metadata}/study_ipdsharing.py (99%) rename model/{study => study_metadata}/study_link.py (100%) rename model/{study => study_metadata}/study_location.py (100%) rename model/{study => study_metadata}/study_other.py (96%) rename model/{study => study_metadata}/study_overall_official.py (84%) rename model/{study => study_metadata}/study_reference.py (96%) rename model/{study => study_metadata}/study_sponsors_collaborators.py (100%) rename model/{study => study_metadata}/study_status.py (100%) diff --git a/model/__init__.py b/model/__init__.py index 6a92aa1a..53519737 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -33,7 +33,7 @@ from model.dataset_metadata.dataset_related_item import DatasetRelatedItem from .study_metadata.study_arm import StudyArm -from .study_metadata.study_available_ipd import StudyAvailableIPD +from .study_metadata.study_available_ipd import StudyAvailableIpd from .study_metadata.study_contact import StudyContact from .study_metadata.study_description import StudyDescription from .study_metadata.study_design import StudyDesign @@ -44,7 +44,7 @@ from .study_metadata.study_link import StudyLink from .study_metadata.study_location import StudyLocation from .study_metadata.study_other import StudyOther -from .study_metadata.study_overall_official import StudyOverall +from .study_metadata.study_overall_official import StudyOverallOfficial from .study_metadata.study_reference import StudyReference from 
.study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from .study_metadata.study_status import StudyStatus @@ -85,7 +85,7 @@ "DatasetRelatedItem", "DatasetDescription", "StudyArm", - "StudyAvailableIPD", + "StudyAvailableIpd", "StudyContact", "StudyDescription", "StudyDesign", @@ -96,7 +96,7 @@ "StudyLink", "StudyLocation", "StudyOther", - "StudyOverall", + "StudyOverallOfficial", "StudyReference", "StudySponsorsCollaborators", "StudyStatus", diff --git a/model/study.py b/model/study.py index 247b5969..90eac644 100644 --- a/model/study.py +++ b/model/study.py @@ -27,6 +27,25 @@ def __init__(self): participants = db.relationship("Participant", back_populates="study") invited_contributors = db.relationship("StudyInvitedContributor", back_populates="study") + study_arm = db.relationship("StudyArm", back_populates="study") + study_available_ipd = db.relationship("StudyAvailableIpd", back_populates="study") + study_contact = db.relationship("StudyContact", back_populates="study") + study_description = db.relationship("StudyDescription", back_populates="study") + study_design = db.relationship("StudyDesign", back_populates="study") + study_eligibility = db.relationship("StudyEligibility", back_populates="study") + study_identification = db.relationship("StudyIdentification", back_populates="study") + study_intervention = db.relationship("StudyIntervention", back_populates="study") + study_ipdsharing = db.relationship("StudyIpdsharing", back_populates="study") + study_link = db.relationship("StudyLink", back_populates="study") + study_location = db.relationship("StudyLocation", back_populates="study") + study_other = db.relationship("StudyOther", back_populates="study") + study_overall_official = db.relationship("StudyOverallOfficial", back_populates="study") + study_reference = db.relationship("StudyReference", back_populates="study") + study_sponsors_collaborators = db.relationship("StudySponsorsCollaborators", back_populates="study") + 
study_status = db.relationship("StudyStatus", back_populates="study") + + + def to_dict(self): """Converts the study to a dictionary""" return { diff --git a/model/study/study_arm.py b/model/study_metadata/study_arm.py similarity index 97% rename from model/study/study_arm.py rename to model/study_metadata/study_arm.py index 9d824f14..c8db9681 100644 --- a/model/study/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -10,7 +10,6 @@ class StudyArm(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - # self.created_at = datetime.now() __tablename__ = "study_arm" @@ -23,6 +22,7 @@ def __init__(self): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_arm") + def to_dict(self): """Converts the study to a dictionary""" return { diff --git a/model/study/study_avaliable_ipd.py b/model/study_metadata/study_available_ipd.py similarity index 93% rename from model/study/study_avaliable_ipd.py rename to model/study_metadata/study_available_ipd.py index 4186ec0f..4a22ea9e 100644 --- a/model/study/study_avaliable_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -3,7 +3,7 @@ from ..db import db -class StudyAvailable(db.Model): +class StudyAvailableIpd(db.Model): """A study is a collection of datasets and participants""" def __init__(self): @@ -17,7 +17,7 @@ def __init__(self): comment = db.Column(db.String, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) - study = db.relationship("Study", back_populates="study_available") + study = db.relationship("Study", back_populates="study_available_ipd") def to_dict(self): """Converts the study to a dictionary""" @@ -32,7 +32,7 @@ def to_dict(self): @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" - study_available = StudyAvailable() + study_available = StudyAvailableIpd() study_available.update(data) return study_available diff --git a/model/study/study_contact.py 
b/model/study_metadata/study_contact.py similarity index 97% rename from model/study/study_contact.py rename to model/study_metadata/study_contact.py index 1ca0d531..de4116fe 100644 --- a/model/study/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -18,7 +18,7 @@ def __init__(self): phone = db.Column(db.String, nullable=False) phone_ext = db.Column(db.String, nullable=False) email_address = db.Column(db.String, nullable=False) - central_contact = db.Column(db.Boolean, nullable=False) + central_contact = db.Column(db.BOOLEAN, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_contact") diff --git a/model/study/study_description.py b/model/study_metadata/study_description.py similarity index 100% rename from model/study/study_description.py rename to model/study_metadata/study_description.py diff --git a/model/study/study_design.py b/model/study_metadata/study_design.py similarity index 96% rename from model/study/study_design.py rename to model/study_metadata/study_design.py index 57296a2d..f16e480a 100644 --- a/model/study/study_design.py +++ b/model/study_metadata/study_design.py @@ -22,7 +22,7 @@ def __init__(self): design_masking_description = db.Column(db.String, nullable=False) design_who_masked_list = db.Column(ARRAY(String), nullable=False) phase_list = db.Column(ARRAY(String), nullable=False) - enrollment_count = db.Column(db.String, nullable=False) + enrollment_count = db.Column(db.Integer, nullable=False) enrollment_type = db.Column(db.String, nullable=False) number_arms = db.Column(db.Integer, nullable=False) design_observational_model_list = db.Column(ARRAY(String), nullable=False) @@ -57,7 +57,6 @@ def to_dict(self): "bio_spec_description": self.bio_spec_description, "target_duration": self.target_duration, "number_groups_cohorts": str(self.number_groups_cohorts), - "intervention_list": self.intervention_list } @staticmethod @@ -88,15 +87,6 @@ def 
update(self, data): self.bio_spec_description = data["bio_spec_description"] self.target_duration = data["target_duration"] self.number_groups_cohorts = data["number_groups_cohorts"] - self.intervention_list = data["intervention_list"] - - - - - - - - def validate(self): """Validates the study""" diff --git a/model/study/study_eligibility.py b/model/study_metadata/study_eligibility.py similarity index 89% rename from model/study/study_eligibility.py rename to model/study_metadata/study_eligibility.py index 8619f0d3..7be5e7be 100644 --- a/model/study/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -14,10 +14,11 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) gender = db.Column(db.String, nullable=False) - gender_based = db.Column(db.Boolean, nullable=False) + gender_based = db.Column(db.BOOLEAN, nullable=False) gender_description = db.Column(db.String, nullable=False) minimum_age = db.Column(db.String, nullable=False) - maximum_age = db.Column(db.Boolean, nullable=False) + maximum_age = db.Column(db.String, nullable=False) + healthy_volunteers = db.Column(db.BOOLEAN, nullable=False) inclusion_criteria = db.Column(ARRAY(String), nullable=False) exclusion_criteria = db.Column(ARRAY(String), nullable=False) study_population = db.Column(db.String, nullable=False) @@ -35,6 +36,7 @@ def to_dict(self): "gender_description": self.gender_description, "minimum_age": self.miminum_age, "maximum_age": self.maximum_age, + "healthy_volunteers": self.healthy_volunteers, "inclusion_criteria": self.inclusion_criteria, "exclusion_criteria": self.exclusion_criteria, "study_population": self.study_population, @@ -57,6 +59,7 @@ def update(self, data): self.gender_description = data["gender_description"] self.minimum_age = data["minimum_age"] self.maximum_age = data["maximum_age"] + self.healthy_volunteers = data["healthy_volunteers"] self.inclusion_criteria = data["inclusion_criteria"] self.exclusion_criteria = data["exclusion_criteria"] 
self.study_population = data["study_population"] diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py new file mode 100644 index 00000000..530576dd --- /dev/null +++ b/model/study_metadata/study_identification.py @@ -0,0 +1,53 @@ +import uuid +from ..db import db + + +class StudyIdentification(db.Model): + """A study is a collection of datasets and participants""" + + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = "study_identification" + + id = db.Column(db.CHAR(36), primary_key=True) + identifier = db.Column(db.String, nullable=False) + identifier_type = db.Column(db.String, nullable=False) + identifier_domain = db.Column(db.String, nullable=False) + identifier_link = db.Column(db.String, nullable=False) + secondary = db.Column(db.BOOLEAN, nullable=False) + + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study = db.relationship("Study", back_populates="study_identification") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "identifier": self.identifier, + "identifier_type": self.identifier_type, + "identifier_domain": self.identifier_domain, + "identifier_link": self.identifier_link, + "secondary": self.secondary, + } + + @staticmethod + def from_data(data: dict): + """Creates a new study from a dictionary""" + study_identification = StudyIdentification() + study_identification.update(data) + + return study_identification + + def update(self, data): + """Updates the study from a dictionary""" + self.identifier = data["identifier"] + self.identifier_type = data["identifier_type"] + self.identifier_domain = data["identifier_domain"] + self.identifier_link = data["identifier_link"] + self.secondary = data["secondary"] + + def validate(self): + """Validates the lead_sponsor_last_name study""" + violations = [] + return violations diff --git a/model/study/study_intervention.py b/model/study_metadata/study_intervention.py similarity 
index 100% rename from model/study/study_intervention.py rename to model/study_metadata/study_intervention.py diff --git a/model/study/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py similarity index 99% rename from model/study/study_ipdsharing.py rename to model/study_metadata/study_ipdsharing.py index d3c9ab1a..914c5670 100644 --- a/model/study/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -33,8 +33,6 @@ def to_dict(self): "ipd_sharing_time_frame": self.ipd_sharing_time_frame, "ipd_sharing_access_criteria": self.ipd_sharing_access_criteria, "ipd_sharing_url": self.ipd_sharing_url, - - } @staticmethod diff --git a/model/study/study_link.py b/model/study_metadata/study_link.py similarity index 100% rename from model/study/study_link.py rename to model/study_metadata/study_link.py diff --git a/model/study/study_location.py b/model/study_metadata/study_location.py similarity index 100% rename from model/study/study_location.py rename to model/study_metadata/study_location.py diff --git a/model/study/study_other.py b/model/study_metadata/study_other.py similarity index 96% rename from model/study/study_other.py rename to model/study_metadata/study_other.py index 8a95a221..e5f19bbe 100644 --- a/model/study/study_other.py +++ b/model/study_metadata/study_other.py @@ -13,7 +13,7 @@ def __init__(self): __tablename__ = "study_other" id = db.Column(db.CHAR(36), primary_key=True) - oversight_has_dmc = db.Column(db.Boolean, nullable=False) + oversight_has_dmc = db.Column(db.BOOLEAN, nullable=False) conditions = db.Column(ARRAY(String), nullable=False) keywords = db.Column(ARRAY(String), nullable=False) size = db.Column(db.String, nullable=False) diff --git a/model/study/study_overall_official.py b/model/study_metadata/study_overall_official.py similarity index 84% rename from model/study/study_overall_official.py rename to model/study_metadata/study_overall_official.py index 4a242084..eac4d53d 100644 --- 
a/model/study/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -2,12 +2,12 @@ from ..db import db -class StudyOverall(db.Model): +class StudyOverallOfficial(db.Model): """A study is a collection of datasets and participants""" def __init__(self): self.id = str(uuid.uuid4()) - __tablename__ = "study_overall" + __tablename__ = "study_overall_official" id = db.Column(db.CHAR(36), primary_key=True) first_name = db.Column(db.String, nullable=False) @@ -16,7 +16,7 @@ def __init__(self): role = db.Column(db.String, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) - study = db.relationship("Study", back_populates="study_overall") + study = db.relationship("Study", back_populates="study_overall_official") def to_dict(self): """Converts the study to a dictionary""" @@ -31,10 +31,10 @@ def to_dict(self): @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" - study_overall = StudyOverall() - study_overall.update(data) + study_overall_official = StudyOverallOfficial() + study_overall_official.update(data) - return study_overall + return study_overall_official def update(self, data): """Updates the study from a dictionary""" diff --git a/model/study/study_reference.py b/model/study_metadata/study_reference.py similarity index 96% rename from model/study/study_reference.py rename to model/study_metadata/study_reference.py index f08d0aa1..2f68d8e8 100644 --- a/model/study/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -13,7 +13,7 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) title = db.Column(db.String, nullable=False) - type = db.Column(db.Boolean, nullable=False) + type = db.Column(db.BOOLEAN, nullable=False) citation = db.Column(db.String, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) diff --git a/model/study/study_sponsors_collaborators.py 
b/model/study_metadata/study_sponsors_collaborators.py similarity index 100% rename from model/study/study_sponsors_collaborators.py rename to model/study_metadata/study_sponsors_collaborators.py diff --git a/model/study/study_status.py b/model/study_metadata/study_status.py similarity index 100% rename from model/study/study_status.py rename to model/study_metadata/study_status.py From fb789657bdc7adb96b6753a36f740af7b92dcd26 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 17 Aug 2023 20:46:48 +0000 Subject: [PATCH 025/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/__init__.py | 16 +++++++----- model/dataset.py | 26 ++++++++++++++----- model/dataset_contributor.py | 9 ++++--- .../dataset_contributor_affiliation.py | 4 ++- model/dataset_metadata/dataset_description.py | 4 +-- model/dataset_metadata/dataset_other.py | 11 ++++---- model/dataset_metadata/dataset_readme.py | 8 ++---- .../dataset_metadata/dataset_related_item.py | 4 +-- .../dataset_related_item_title.py | 1 + model/dataset_metadata/dataset_subject.py | 8 +----- model/dataset_metadata/dataset_title.py | 4 +-- model/study.py | 14 ++++++---- model/study_metadata/study_arm.py | 2 -- model/study_metadata/study_available_ipd.py | 1 + model/study_metadata/study_description.py | 4 +-- model/study_metadata/study_design.py | 9 +++++-- model/study_metadata/study_eligibility.py | 1 - model/study_metadata/study_identification.py | 2 +- model/study_metadata/study_intervention.py | 2 +- model/study_metadata/study_link.py | 6 +---- model/study_metadata/study_location.py | 1 + model/study_metadata/study_other.py | 2 +- .../study_metadata/study_overall_official.py | 2 +- model/study_metadata/study_reference.py | 2 +- .../study_sponsors_collaborators.py | 17 ++++++++---- model/study_metadata/study_status.py | 1 - 26 files changed, 86 insertions(+), 75 deletions(-) diff 
--git a/model/__init__.py b/model/__init__.py index 53519737..40b69469 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -12,7 +12,9 @@ from .dataset_metadata.dataset_access import DatasetAccess from .dataset_metadata.dataset_consent import DatasetConsent -from .dataset_metadata.dataset_contributor_affiliation import DatasetContributorAffiliation +from .dataset_metadata.dataset_contributor_affiliation import ( + DatasetContributorAffiliation, +) from .dataset_metadata.dataset_date import DatasetDate from .dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel from .dataset_metadata.dataset_description import DatasetDescription @@ -26,8 +28,12 @@ from .dataset_metadata.dataset_title import DatasetTitle from .dataset_metadata.dataset_subject import DatasetSubject -from model.dataset_metadata.dataset_related_item_contributor import DatasetRelatedItemContributor -from model.dataset_metadata.dataset_related_item_identifier import DatasetRelatedItemIdentifier +from model.dataset_metadata.dataset_related_item_contributor import ( + DatasetRelatedItemContributor, +) +from model.dataset_metadata.dataset_related_item_identifier import ( + DatasetRelatedItemIdentifier, +) from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle from model.dataset_metadata.dataset_related_item import DatasetRelatedItem @@ -50,7 +56,6 @@ from .study_metadata.study_status import StudyStatus - __all__ = [ "Study", "Dataset", @@ -62,7 +67,6 @@ "DatasetContributor", "StudyInvitedContributor", "StudyContributor", - "DatasetOther", "DatasetAccess", "DatasetConsent", @@ -100,6 +104,4 @@ "StudyReference", "StudySponsorsCollaborators", "StudyStatus", - - ] diff --git a/model/dataset.py b/model/dataset.py index e328709a..0911ff3c 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -21,21 +21,33 @@ def __init__(self, study): study_id = db.Column(db.CHAR(36), 
db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="dataset") - dataset_contributors = db.relationship("DatasetContributor", back_populates="dataset") - dataset_versions = db.relationship("DatasetVersion", back_populates="dataset", lazy="dynamic") + dataset_contributors = db.relationship( + "DatasetContributor", back_populates="dataset" + ) + dataset_versions = db.relationship( + "DatasetVersion", back_populates="dataset", lazy="dynamic" + ) dataset_access = db.relationship("DatasetAccess", back_populates="dataset") dataset_consent = db.relationship("DatasetConsent", back_populates="dataset") dataset_date = db.relationship("DatasetDate", back_populates="dataset") - dataset_de_ident_level = db.relationship("DatasetDeIdentLevel", back_populates="dataset") - dataset_description = db.relationship("DatasetDescription", back_populates="dataset") + dataset_de_ident_level = db.relationship( + "DatasetDeIdentLevel", back_populates="dataset" + ) + dataset_description = db.relationship( + "DatasetDescription", back_populates="dataset" + ) dataset_funder = db.relationship("DatasetFunder", back_populates="dataset") dataset_identifier = db.relationship("DatasetIdentifier", back_populates="dataset") - dataset_managing_organization = db.relationship("DatasetManagingOrganization", back_populates="dataset") + dataset_managing_organization = db.relationship( + "DatasetManagingOrganization", back_populates="dataset" + ) dataset_other = db.relationship("DatasetOther", back_populates="dataset") dataset_readme = db.relationship("DatasetReadme", back_populates="dataset") dataset_record_keys = db.relationship("DatasetRecordKeys", back_populates="dataset") - dataset_related_item = db.relationship("DatasetRelatedItem", back_populates="dataset") + dataset_related_item = db.relationship( + "DatasetRelatedItem", back_populates="dataset" + ) dataset_rights = db.relationship("DatasetRights", back_populates="dataset") dataset_subject = db.relationship("DatasetSubject", 
back_populates="dataset") dataset_title = db.relationship("DatasetTitle", back_populates="dataset") @@ -49,7 +61,7 @@ def to_dict(self): "updated_on": str(datetime.now()), "created_at": str(datetime.now()), "dataset_versions": [i.to_dict() for i in self.dataset_versions], - "latest_version": last_published.id if last_published else None + "latest_version": last_published.id if last_published else None, } def last_published(self): diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py index 6787c223..45e70242 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -21,7 +21,8 @@ def __init__(self): dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship("Dataset", back_populates="dataset_contributors") dataset_contributor_affiliation = db.relationship( - "DatasetContributorAffiliation", back_populates="dataset_contributors") + "DatasetContributorAffiliation", back_populates="dataset_contributors" + ) def to_dict(self): return { @@ -34,10 +35,8 @@ def to_dict(self): "name_identifier_scheme_uri": self.name_identifier_scheme_uri, "creator": self.creator, "contributor_type": self.contributor_type, - } - @staticmethod def from_data(data: dict): dataset_contributor = DatasetContributor() @@ -46,7 +45,9 @@ def from_data(data: dict): dataset_contributor.name_type = data["name_type"] dataset_contributor.name_identifier = data["name_identifier"] dataset_contributor.name_identifier_scheme = data["name_identifier_scheme"] - dataset_contributor.name_identifier_scheme_uri = data["name_identifier_scheme_uri"] + dataset_contributor.name_identifier_scheme_uri = data[ + "name_identifier_scheme_uri" + ] dataset_contributor.creator = data["creator"] dataset_contributor.contributor_type = data["contributor_type"] return dataset_contributor diff --git a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index 0b88660c..0cd6d703 100644 --- 
a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -14,7 +14,9 @@ def __init__(self): dataset_contributors = db.relationship( "DatasetContributor", back_populates="dataset_contributor_affiliation" ) - dataset_contributor_id = db.Column(db.String, db.ForeignKey("dataset_contributor.id")) + dataset_contributor_id = db.Column( + db.String, db.ForeignKey("dataset_contributor.id") + ) def to_dict(self): return { diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index 974da190..5c9ea55d 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -12,9 +12,7 @@ def __init__(self): description_type = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_description" - ) + dataset = db.relationship("Dataset", back_populates="dataset_description") def to_dict(self): return { diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index ea3f0f61..17b9a281 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -3,6 +3,7 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY + class DatasetOther(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -18,8 +19,7 @@ def __init__(self): acknowledgement = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship("Dataset", back_populates="dataset_other" - ) + dataset = db.relationship("Dataset", back_populates="dataset_other") def to_dict(self): return { @@ -37,9 +37,8 @@ def from_data(data: dict): dataset_other = DatasetOther() dataset_other.language = data["language"] dataset_other.managing_organization_name = data["managing_organization_name"] - 
dataset_other.managing_organization_ror_id = data["managing_organization_ror_id"] + dataset_other.managing_organization_ror_id = data[ + "managing_organization_ror_id" + ] dataset_other.size = data["size"] return dataset_other - - - diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index 65d1dede..ccf6e41e 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetReadme(db.Model): def __init__(self): self.id = str(uuid.uuid4()) @@ -10,9 +11,7 @@ def __init__(self): content = db.Column(db.BOOLEAN, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_readme" - ) + dataset = db.relationship("Dataset", back_populates="dataset_readme") def to_dict(self): return { @@ -25,6 +24,3 @@ def from_data(data: dict): dataset_readme = DatasetReadme() dataset_readme.content = data["content"] return dataset_readme - - - diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index 8d36fb25..ed60469d 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -12,9 +12,7 @@ def __init__(self): relation_type = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_related_item" - ) + dataset = db.relationship("Dataset", back_populates="dataset_related_item") dataset_related_item_contributor = db.relationship( "DatasetRelatedItemContributor", back_populates="dataset_related_item" ) diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 037b2774..32aa37b1 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ 
b/model/dataset_metadata/dataset_related_item_title.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class DatasetRelatedItemTitle(db.Model): def __init__(self): self.id = str(uuid.uuid4()) diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 4b92e1f6..7714288b 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -16,9 +16,7 @@ def __init__(self): classification_code = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship( - "Dataset", back_populates="dataset_subject" - ) + dataset = db.relationship("Dataset", back_populates="dataset_subject") def to_dict(self): return { @@ -28,7 +26,6 @@ def to_dict(self): "scheme_uri": self.scheme_uri, "value_uri": self.value_uri, "classification_code": self.classification_code, - } @staticmethod @@ -40,6 +37,3 @@ def from_data(data: dict): dataset_subject.value_uri = data["value_uri"] dataset_subject.classification_code = data["classification_code"] return dataset_subject - - - diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index b3befc5e..c0f8c61a 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -10,9 +10,7 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) type = db.Column(db.String, nullable=False) - dataset = db.relationship( - "Dataset", back_populates="dataset_title" - ) + dataset = db.relationship("Dataset", back_populates="dataset_title") dataset_id = db.Column(db.String, db.ForeignKey("dataset.id")) def to_dict(self): diff --git a/model/study.py b/model/study.py index 85028eb4..0de969c6 100644 --- a/model/study.py +++ b/model/study.py @@ -35,19 +35,23 @@ def __init__(self): study_description = db.relationship("StudyDescription", back_populates="study") study_design = 
db.relationship("StudyDesign", back_populates="study") study_eligibility = db.relationship("StudyEligibility", back_populates="study") - study_identification = db.relationship("StudyIdentification", back_populates="study") + study_identification = db.relationship( + "StudyIdentification", back_populates="study" + ) study_intervention = db.relationship("StudyIntervention", back_populates="study") study_ipdsharing = db.relationship("StudyIpdsharing", back_populates="study") study_link = db.relationship("StudyLink", back_populates="study") study_location = db.relationship("StudyLocation", back_populates="study") study_other = db.relationship("StudyOther", back_populates="study") - study_overall_official = db.relationship("StudyOverallOfficial", back_populates="study") + study_overall_official = db.relationship( + "StudyOverallOfficial", back_populates="study" + ) study_reference = db.relationship("StudyReference", back_populates="study") - study_sponsors_collaborators = db.relationship("StudySponsorsCollaborators", back_populates="study") + study_sponsors_collaborators = db.relationship( + "StudySponsorsCollaborators", back_populates="study" + ) study_status = db.relationship("StudyStatus", back_populates="study") - - def to_dict(self): """Converts the study to a dictionary""" return { diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index c8db9681..799d447d 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -22,7 +22,6 @@ def __init__(self): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_arm") - def to_dict(self): """Converts the study to a dictionary""" return { @@ -48,7 +47,6 @@ def update(self, data): self.description = data["description"] self.intervention_list = data["intervention_list"] - def validate(self): """Validates the study""" violations = [] diff --git a/model/study_metadata/study_available_ipd.py 
b/model/study_metadata/study_available_ipd.py index 4a22ea9e..65c657b3 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -8,6 +8,7 @@ class StudyAvailableIpd(db.Model): def __init__(self): self.id = str(uuid.uuid4()) + __tablename__ = "study_available" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index 4a226504..9fbd47b8 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class StudyDescription(db.Model): """A study is a collection of datasets and participants""" @@ -22,8 +23,7 @@ def to_dict(self): "id": self.id, "brief_summary": self.brief_summary, "detailed_description": self.detailed_description, - - } + } @staticmethod def from_data(data: dict): diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index f16e480a..59cdb7a6 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -10,6 +10,7 @@ class StudyDesign(db.Model): def __init__(self): self.id = str(uuid.uuid4()) + __tablename__ = "study_design" id = db.Column(db.CHAR(36), primary_key=True) @@ -51,7 +52,9 @@ def to_dict(self): "enrollment_count": self.enrollment_count, "enrollment_type": self.enrollment_type, "number_arms": self.number_arms, - "design_observational_model_list": str(self.design_observational_model_list), + "design_observational_model_list": str( + self.design_observational_model_list + ), "design_time_perspective_list": self.design_time_perspective_list, "bio_spec_retention": self.bio_spec_retention, "bio_spec_description": self.bio_spec_description, @@ -72,7 +75,9 @@ def update(self, data): self.design_allocation = data["design_allocation"] self.study_type = data["study_type"] self.design_interventional_model = data["design_interventional_model"] - 
self.design_intervention_model_description = data["design_intervention_model_description"] + self.design_intervention_model_description = data[ + "design_intervention_model_description" + ] self.design_primary_purpose = data["design_primary_purpose"] self.design_masking = data["design_masking"] self.design_masking_description = data["design_masking_description"] diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 7be5e7be..5e781ab0 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -41,7 +41,6 @@ def to_dict(self): "exclusion_criteria": self.exclusion_criteria, "study_population": self.study_population, "sampling_method": self.sampling_method, - } @staticmethod diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 530576dd..11b5c1ab 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -29,7 +29,7 @@ def to_dict(self): "identifier_domain": self.identifier_domain, "identifier_link": self.identifier_link, "secondary": self.secondary, - } + } @staticmethod def from_data(data: dict): diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 7d096c90..d20d270b 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -31,7 +31,7 @@ def to_dict(self): "description": self.description, "arm_group_label_list": self.arm_group_label_list, "other_name_list": self.other_name_list, - } + } @staticmethod def from_data(data: dict): diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 19479631..44d379cb 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -19,11 +19,7 @@ def __init__(self): def to_dict(self): """Converts the study to a dictionary""" - return { - "id": self.id, - "url": 
self.url, - "title": self.title - } + return {"id": self.id, "url": self.url, "title": self.title} @staticmethod def from_data(data: dict): diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 12d1d5c4..88bd7915 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -1,6 +1,7 @@ import uuid from ..db import db + class StudyLocation(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index e5f19bbe..04c3b04b 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -10,6 +10,7 @@ class StudyOther(db.Model): def __init__(self): self.id = str(uuid.uuid4()) + __tablename__ = "study_other" id = db.Column(db.CHAR(36), primary_key=True) @@ -46,7 +47,6 @@ def update(self, data): self.keywords = data["keywords"] self.size = data["size"] - def validate(self): """Validates the study""" violations = [] diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index eac4d53d..b1140d45 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -7,6 +7,7 @@ class StudyOverallOfficial(db.Model): def __init__(self): self.id = str(uuid.uuid4()) + __tablename__ = "study_overall_official" id = db.Column(db.CHAR(36), primary_key=True) @@ -43,7 +44,6 @@ def update(self, data): self.affiliation = data["affiliation"] self.role = data["role"] - def validate(self): """Validates the study""" violations = [] diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 2f68d8e8..afa685b0 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -8,6 +8,7 @@ class StudyReference(db.Model): def __init__(self): self.id = str(uuid.uuid4()) + __tablename__ = "study_reference" id = 
db.Column(db.CHAR(36), primary_key=True) @@ -44,7 +45,6 @@ def update(self, data): self.type = data["type"] self.citation = data["citation"] - def validate(self): """Validates the study""" violations = [] diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 6b40bee3..52ec4a54 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -37,7 +37,6 @@ def to_dict(self): "lead_sponsor_first_name": self.lead_sponsor_first_name, "lead_sponsor_last_name": self.lead_sponsor_last_name, "collaborator_name": self.collaborator_name, - } @staticmethod @@ -51,10 +50,18 @@ def from_data(data: dict): def update(self, data): """Updates the study from a dictionary""" self.responsible_party_type = data["responsible_party_type"] - self.responsible_party_investigator_first_name = data["responsible_party_investigator_first_name"] - self.responsible_party_investigator_last_name = data["responsible_party_investigator_last_name"] - self.responsible_party_investigator_title = data["responsible_party_investigator_title"] - self.responsible_party_investigator_affiliation = data["responsible_party_investigator_affiliation"] + self.responsible_party_investigator_first_name = data[ + "responsible_party_investigator_first_name" + ] + self.responsible_party_investigator_last_name = data[ + "responsible_party_investigator_last_name" + ] + self.responsible_party_investigator_title = data[ + "responsible_party_investigator_title" + ] + self.responsible_party_investigator_affiliation = data[ + "responsible_party_investigator_affiliation" + ] self.lead_sponsor_first_name = data["lead_sponsor_first_name"] self.lead_sponsor_last_name = data["lead_sponsor_last_name"] self.collaborator_name = data["collaborator_name"] diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 55e77080..5d4065e9 100644 --- 
a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -33,7 +33,6 @@ def to_dict(self): "start_date_type": self.start_date_type, "completion_date": str(self.completion_date), "completion_date_type": self.completion_date_type, - } @staticmethod From 6848d7acb83cb5f3bb35161a7b1a25614f8efefe Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 17 Aug 2023 15:42:21 -0700 Subject: [PATCH 026/505] fix:contributor marshall model fixed --- apis/contributor.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 4c27a7d7..cefb6cd0 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,6 +1,6 @@ from flask_restx import Namespace, Resource, fields -from model import User +from model import StudyContributor api = Namespace("contributor", description="contributors", path="/") @@ -8,15 +8,10 @@ contributors_model = api.model( "DatasetVersion", { - "id": fields.String(required=True), - "affiliations": fields.String(required=True), - "email": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "orcid": fields.String(required=True), - "roles": fields.List(fields.String, required=True), + "user_id": fields.String(required=True), "permission": fields.String(required=True), - "status": fields.String(required=True), + "study_id": fields.String(required=True), + }, ) @@ -27,8 +22,7 @@ class AddParticipant(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The contributor identifier") - # @api.marshal_with(contributors_model) - # @api.marshal_with(contributor) + @api.marshal_with(contributors_model) def get(self, study_id: int): - contributors = User.query.all() + contributors = StudyContributor.query.all() return [c.to_dict() for c in contributors] From f2240389c38fd5026f32ca6c6122b596a11a6a07 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 21 Aug 2023 
16:50:04 -0700 Subject: [PATCH 027/505] feat: added study metadata endpoints --- apis/__init__.py | 49 +++++++++++++++++++++ apis/dataset.py | 45 +++++++++---------- apis/participant.py | 6 ++- apis/study_metadata/study_arm.py | 32 ++++++++++++++ apis/study_metadata/study_available_ipd.py | 32 ++++++++++++++ apis/study_metadata/study_contact.py | 5 +++ model/dataset.py | 23 +++++----- model/study.py | 2 - model/study_metadata/study_arm.py | 2 +- model/study_metadata/study_available_ipd.py | 2 +- 10 files changed, 155 insertions(+), 43 deletions(-) create mode 100644 apis/study_metadata/study_arm.py create mode 100644 apis/study_metadata/study_available_ipd.py create mode 100644 apis/study_metadata/study_contact.py diff --git a/apis/__init__.py b/apis/__init__.py index 21408c47..2baa272b 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -7,6 +7,31 @@ from .participant import api as participants_api from .study import api as study_api +from .study_metadata.study_arm import api as arm +from .study_metadata.study_available_ipd import api as available_ipd +from .study_metadata.study_contact import api as contact +from .study_metadata.study_description import api as description +from .study_metadata.study_design import api as design +from .study_metadata.study_eligibility import api as eligibility +from .study_metadata.study_identification import api as identification +from .study_metadata.study_intervention import api as intervention +from .study_metadata.study_ipdsharing import api as ipdsharing +from .study_metadata.study_link import api as link +from .study_metadata.study_location import api as location +from .study_metadata.study_other import api as other +from .study_metadata.study_overall_official import api as overall_official +from .study_metadata.study_reference import api as reference +from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator +from .study_metadata.study_status import api as status + + + + + + + + + api = Api( 
title="FAIRHUB", description="The backend api system for the fairhub vue app", @@ -29,3 +54,27 @@ def get(self): api.add_namespace(dataset_api) api.add_namespace(participants_api) api.add_namespace(contributors_api) + +api.add_namespace(arm) +api.add_namespace(available_ipd) +api.add_namespace(contact) +# api.add_namespace(description) +# api.add_namespace(design) +# api.add_namespace(eligibility) +# api.add_namespace(identification) +# api.add_namespace(intervention) +# api.add_namespace(ipdsharing) +# api.add_namespace(link) +# api.add_namespace(location) +# api.add_namespace(other) +# api.add_namespace(overall_official) +# api.add_namespace(reference) +# api.add_namespace(sponsors_collaborator) +# api.add_namespace(status) + + + + + + + diff --git a/apis/dataset.py b/apis/dataset.py index 0bdc01fb..51131701 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -33,7 +33,7 @@ @api.route("/study//dataset") -class AddDataset(Resource): +class DatasetList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("add dataset", params={"id": "An ID"}) @@ -47,21 +47,31 @@ def get(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("update dataset") - @api.marshal_with(dataset) + # @api.marshal_with(dataset) def post(self, study_id): - data = request.json study = Study.query.get(study_id) # todo if study.participant id== different study Throw error - dataset_obj = Dataset(study) - dataset_versions = DatasetVersion.from_data(dataset_obj, data) - db.session.add(dataset_obj) - db.session.add(dataset_versions) + dataset_ = Dataset.from_data(study, request.json) + db.session.add(dataset_) + db.session.commit() + return dataset_.to_dict() + + +# TODO not finalized endpoint. 
have to set functionality +@api.route("/study//dataset/") +@api.response(201, "Success") +@api.response(400, "Validation Error") +class DatasetResource(Resource): + def put(self, study_id, dataset_id): + data = request.json + data_obj = Dataset.query.get(dataset_id) + data_obj.update(data) db.session.commit() - return dataset_versions.to_dict() + return data_obj.to_dict() @api.route("/study//dataset//version/") -class UpdateDataset(Resource): +class Version(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("dataset version") @@ -90,7 +100,7 @@ def delete(self, study_id, dataset_id, version_id): @api.route("/study//dataset//version") @api.response(201, "Success") @api.response(400, "Validation Error") -class PostDatasetVersion(Resource): +class VersionList(Resource): def post(self, study_id: int, dataset_id: int): data = request.json data["participants"] = [Participant.query.get(i) for i in data["participants"]] @@ -99,18 +109,3 @@ def post(self, study_id: int, dataset_id: int): db.session.add(dataset_versions) db.session.commit() return jsonify(dataset_versions.to_dict()) - - -# TODO not finalized endpoint. 
have to set functionality -@api.route("/study//dataset/") -@api.response(201, "Success") -@api.response(400, "Validation Error") -class PostDataset(Resource): - def put(study_id, dataset_id): - data = request.json - data["participants"] = [Participant.query.get(i) for i in data["participants"]] - data_obj = Dataset.query.get(dataset_id) - dataset_ = Dataset.from_data(data_obj, data) - db.session.add(dataset_) - db.session.commit() - return jsonify(dataset_.to_dict()) diff --git a/apis/participant.py b/apis/participant.py index de131ad4..39b0517d 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -11,9 +11,11 @@ "id": fields.String(required=True), "first_name": fields.String(required=True), "last_name": fields.String(required=True), - "firstname": fields.String(required=True), + "created_at": fields.String(required=True), + "updated_on": fields.String(required=True), "address": fields.String(required=True), "age": fields.String(required=True), + }, ) @@ -24,7 +26,7 @@ class AddParticipant(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "Adding participants") - # @api.marshal_with(participants) + @api.marshal_with(participant_model) def get(self, study_id: int): participants = Participant.query.all() return [p.to_dict() for p in participants] diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py new file mode 100644 index 00000000..dc6edd32 --- /dev/null +++ b/apis/study_metadata/study_arm.py @@ -0,0 +1,32 @@ +from flask_restx import Namespace, Resource, fields +from model import Study + + +api = Namespace("arm", description="study operations", path="/") + + +study_arm = api.model( + "StudyArm", + { + "id": fields.String(required=True), + "label": fields.String(required=True), + "type": fields.String(required=True), + "description": fields.String(required=True), + "intervention_list": fields.List(fields.String, required=True), + + }, +) + +@api.route("/study//metadata/arm") +class 
StudyArm(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_arm) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_arm_ = study_.study_arm + return [s.to_dict() for s in study_arm_] + + diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py new file mode 100644 index 00000000..7eab8248 --- /dev/null +++ b/apis/study_metadata/study_available_ipd.py @@ -0,0 +1,32 @@ +from flask_restx import Namespace, Resource, fields +from model import Study + +api = Namespace("available_ipd", description="study operations", path="/") + + +study_available = api.model( + "StudyAvailable", + { + "id": fields.String(required=True), + "label": fields.String(required=True), + "type": fields.String(required=True), + "description": fields.String(required=True), + "intervention_list": fields.List(fields.String, required=True), + + }, +) + + +@api.route("/study//metadata/available") +class StudyArmResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + # @api.marshal_with(study_available) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_available_ = study_.study_available_ipd + return [s.to_dict() for s in study_available_] + + diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py new file mode 100644 index 00000000..3f903a2d --- /dev/null +++ b/apis/study_metadata/study_contact.py @@ -0,0 +1,5 @@ +from flask import request +from flask_restx import Namespace, Resource, fields + + +api = Namespace("study", description="study operations", path="/") diff --git a/model/dataset.py b/model/dataset.py index e328709a..8899299e 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -12,7 +12,7 @@ class Dataset(db.Model): def 
__init__(self, study): self.study = study self.id = str(uuid.uuid4()) - + self.created_at = datetime.now() __tablename__ = "dataset" id = db.Column(db.CHAR(36), primary_key=True) updated_on = db.Column(db.DateTime, nullable=False) @@ -23,6 +23,7 @@ def __init__(self, study): dataset_contributors = db.relationship("DatasetContributor", back_populates="dataset") dataset_versions = db.relationship("DatasetVersion", back_populates="dataset", lazy="dynamic") + dataset_access = db.relationship("DatasetAccess", back_populates="dataset") dataset_consent = db.relationship("DatasetConsent", back_populates="dataset") dataset_date = db.relationship("DatasetDate", back_populates="dataset") @@ -48,7 +49,7 @@ def to_dict(self): "id": self.id, "updated_on": str(datetime.now()), "created_at": str(datetime.now()), - "dataset_versions": [i.to_dict() for i in self.dataset_versions], + # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published else None } @@ -65,14 +66,12 @@ def last_modified(self): ).first() @staticmethod - def from_data(data: dict): - """Creates a new dataset from a dictionary""" - dataset = Dataset() - dataset.latest_version = data["latest_version"] - dataset.published_year = data["published_year"] - dataset.resource_type = data["resource_type"] - dataset.publisher = data["publisher"] - dataset.primary_language = data["primary_language"] - dataset.keywords = data["keywords"] + def from_data(study, data: dict): + dataset_obj = Dataset(study) + dataset_obj.update(data) + return dataset_obj - return dataset + def update(self, data: dict): + """Creates a new dataset from a dictionary""" + self.updated_on = datetime.now() + # self.dataset_versions = data["dataset_versions"] diff --git a/model/study.py b/model/study.py index 85028eb4..68ffa4bd 100644 --- a/model/study.py +++ b/model/study.py @@ -46,8 +46,6 @@ def __init__(self): study_sponsors_collaborators = db.relationship("StudySponsorsCollaborators", 
back_populates="study") study_status = db.relationship("StudyStatus", back_populates="study") - - def to_dict(self): """Converts the study to a dictionary""" return { diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index c8db9681..4b86c69a 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -16,7 +16,7 @@ def __init__(self): id = db.Column(db.CHAR(36), primary_key=True) label = db.Column(db.String, nullable=False) type = db.Column(db.String, nullable=False) - description = db.Column(db.DateTime, nullable=False) + description = db.Column(db.String, nullable=False) intervention_list = db.Column(ARRAY(String), nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 4a22ea9e..1f2efca6 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -8,7 +8,7 @@ class StudyAvailableIpd(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - __tablename__ = "study_available" + __tablename__ = "study_available_ipd" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) From ab972e98ab23c5b41907a98b7dde2e78231ef6cd Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 21 Aug 2023 17:22:53 -0700 Subject: [PATCH 028/505] fix: design and description --- apis/__init__.py | 4 +- apis/study_metadata/study_available_ipd.py | 9 ++--- apis/study_metadata/study_contact.py | 34 +++++++++++++++- apis/study_metadata/study_description.py | 32 +++++++++++++++ apis/study_metadata/study_design.py | 45 ++++++++++++++++++++++ 5 files changed, 116 insertions(+), 8 deletions(-) create mode 100644 apis/study_metadata/study_description.py create mode 100644 apis/study_metadata/study_design.py diff --git a/apis/__init__.py b/apis/__init__.py index 2baa272b..c4d83058 100644 --- a/apis/__init__.py +++ 
b/apis/__init__.py @@ -58,8 +58,8 @@ def get(self): api.add_namespace(arm) api.add_namespace(available_ipd) api.add_namespace(contact) -# api.add_namespace(description) -# api.add_namespace(design) +api.add_namespace(description) +api.add_namespace(design) # api.add_namespace(eligibility) # api.add_namespace(identification) # api.add_namespace(intervention) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 7eab8248..ae71d95a 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -8,11 +8,10 @@ "StudyAvailable", { "id": fields.String(required=True), - "label": fields.String(required=True), + "identifier": fields.String(required=True), "type": fields.String(required=True), - "description": fields.String(required=True), - "intervention_list": fields.List(fields.String, required=True), - + "comment": fields.String(required=True), + "url": fields.String(required=True), }, ) @@ -23,7 +22,7 @@ class StudyArmResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - # @api.marshal_with(study_available) + @api.marshal_with(study_available) def get(self, study_id: int): study_ = Study.query.get(study_id) study_available_ = study_.study_available_ipd diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 3f903a2d..4acafaf1 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -1,5 +1,37 @@ -from flask import request +from model import Study + from flask_restx import Namespace, Resource, fields api = Namespace("study", description="study operations", path="/") + +study_contact = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": 
fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + + }, +) + + +@api.route("/study//metadata/contact") +class StudyArmResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_contact) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_contact_ = study_.study_contact + return [s.to_dict() for s in study_contact_] + + diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py new file mode 100644 index 00000000..62e26b40 --- /dev/null +++ b/apis/study_metadata/study_description.py @@ -0,0 +1,32 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("description", description="study operations", path="/") + + +study_description = api.model( + "StudyDescription", + { + "id": fields.String(required=True), + "brief_summary": fields.String(required=True), + "detailed_description": fields.String(required=True), + + }, +) + + +@api.route("/study//metadata/description") +class StudyArmResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_description) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_description_ = study_.study_description + return [s.to_dict() for s in study_description_] + + diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py new file mode 100644 index 00000000..c7042f89 --- /dev/null +++ b/apis/study_metadata/study_design.py @@ -0,0 +1,45 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + +api = Namespace("design", 
description="study operations", path="/") + +study_design = api.model( + "StudyDesign", + { + "id": fields.String(required=True), + "design_allocation": fields.String(required=True), + "study_type": fields.String(required=True), + "design_interventional_model": fields.String(required=True), + "design_intervention_model_description": fields.String(required=True), + "design_primary_purpose": fields.String(required=True), + "design_masking": fields.String(required=True), + "design_masking_description": fields.String(required=True), + "design_who_masked_list": fields.List(fields.String, required=True), + "phase_list": fields.List(fields.String, required=True), + "enrollment_count": fields.Integer(required=True), + "enrollment_type": fields.String(required=True), + "number_arms": fields.Integer(required=True), + "design_observational_model_list": fields.List(fields.String, required=True), + "design_time_perspective_list": fields.List(fields.String, required=True), + "bio_spec_retention": fields.String(required=True), + "bio_spec_description": fields.String(required=True), + "target_duration": fields.String(required=True), + "number_groups_cohorts": fields.Integer(required=True), + }, +) + + +@api.route("/study//metadata/design") +class StudyArmResource(Resource): + @api.doc("list_design") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + # @api.marshal_with(study_design) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_design_ = study_.study_design + return [s.to_dict() for s in study_design_] + + From f09871ea3c160072af78b9d9a05e120c23356c7b Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 21 Aug 2023 17:48:19 -0700 Subject: [PATCH 029/505] fix: metadata eligibility and identification --- apis/__init__.py | 4 +-- apis/study_metadata/study_eligibility.py | 40 +++++++++++++++++++++ apis/study_metadata/study_identification.py | 34 ++++++++++++++++++ 
model/study_metadata/study_eligibility.py | 2 +- 4 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 apis/study_metadata/study_eligibility.py create mode 100644 apis/study_metadata/study_identification.py diff --git a/apis/__init__.py b/apis/__init__.py index c4d83058..4f7da443 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -60,8 +60,8 @@ def get(self): api.add_namespace(contact) api.add_namespace(description) api.add_namespace(design) -# api.add_namespace(eligibility) -# api.add_namespace(identification) +api.add_namespace(eligibility) +api.add_namespace(identification) # api.add_namespace(intervention) # api.add_namespace(ipdsharing) # api.add_namespace(link) diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py new file mode 100644 index 00000000..bf0b0f6b --- /dev/null +++ b/apis/study_metadata/study_eligibility.py @@ -0,0 +1,40 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("eligibility", description="study operations", path="/") + + +study_eligibility = api.model( + "StudyEligibility", + { + "id": fields.String(required=True), + "gender": fields.String(required=True), + "gender_based": fields.Boolean(required=True), + "gender_description": fields.String(required=True), + "minimum_age": fields.String(required=True), + "maximum_age": fields.String(required=True), + "healthy_volunteers": fields.Boolean(required=True), + "inclusion_criteria": fields.List(fields.String, required=True), + "exclusion_criteria": fields.List(fields.String, required=True), + "study_population": fields.String(required=True), + "sampling_method": fields.String(required=True), + + }, +) + + +@api.route("/study//metadata/eligibility") +class StudyArmResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + # @api.marshal_with(study_eligibility) + def get(self, 
study_id: int): + study_ = Study.query.get(study_id) + study_eligibility_ = study_.study_eligibility + return [s.to_dict() for s in study_eligibility_] + + diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py new file mode 100644 index 00000000..061db974 --- /dev/null +++ b/apis/study_metadata/study_identification.py @@ -0,0 +1,34 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("identification", description="study operations", path="/") + + +study_identification = api.model( + "StudyIdentification", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.Boolean(required=True), + "identifier_domain": fields.String(required=True), + "identifier_link": fields.String(required=True), + "secondary": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/identification") +class StudyArmResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + # @api.marshal_with(study_identification) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_identification_ = study_.study_identification + return [s.to_dict() for s in study_identification_] + + diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 7be5e7be..c7ef16c7 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -34,7 +34,7 @@ def to_dict(self): "gender": self.gender, "gender_based": self.gender_based, "gender_description": self.gender_description, - "minimum_age": self.miminum_age, + "minimum_age": self.minimum_age, "maximum_age": self.maximum_age, "healthy_volunteers": self.healthy_volunteers, "inclusion_criteria": self.inclusion_criteria, From 424033120ed6dd25b2b668b54f49ad787e82d5a2 Mon Sep 17 00:00:00 2001 
From: aydawka Date: Mon, 21 Aug 2023 17:59:49 -0700 Subject: [PATCH 030/505] fix: metadata intervention and ipdsharing --- apis/__init__.py | 8 ++++---- apis/study_metadata/study_available_ipd.py | 2 +- apis/study_metadata/study_contact.py | 2 +- apis/study_metadata/study_description.py | 2 +- apis/study_metadata/study_design.py | 4 ++-- apis/study_metadata/study_eligibility.py | 4 ++-- apis/study_metadata/study_identification.py | 8 ++++---- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 4f7da443..61d9308b 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -62,10 +62,10 @@ def get(self): api.add_namespace(design) api.add_namespace(eligibility) api.add_namespace(identification) -# api.add_namespace(intervention) -# api.add_namespace(ipdsharing) -# api.add_namespace(link) -# api.add_namespace(location) +api.add_namespace(intervention) +api.add_namespace(ipdsharing) +api.add_namespace(link) +api.add_namespace(location) # api.add_namespace(other) # api.add_namespace(overall_official) # api.add_namespace(reference) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index ae71d95a..93b84d6b 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -17,7 +17,7 @@ @api.route("/study//metadata/available") -class StudyArmResource(Resource): +class StudyAvailableResource(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 4acafaf1..764d043f 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -23,7 +23,7 @@ @api.route("/study//metadata/contact") -class StudyArmResource(Resource): +class StudyContactResource(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") diff --git 
a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 62e26b40..2b5543bd 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -18,7 +18,7 @@ @api.route("/study//metadata/description") -class StudyArmResource(Resource): +class StudyDescriptionResource(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index c7042f89..d8b0046c 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -31,12 +31,12 @@ @api.route("/study//metadata/design") -class StudyArmResource(Resource): +class StudyDesignResource(Resource): @api.doc("list_design") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - # @api.marshal_with(study_design) + @api.marshal_with(study_design) def get(self, study_id: int): study_ = Study.query.get(study_id) study_design_ = study_.study_design diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index bf0b0f6b..6aec03d1 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -26,12 +26,12 @@ @api.route("/study//metadata/eligibility") -class StudyArmResource(Resource): +class StudyEligibilityResource(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - # @api.marshal_with(study_eligibility) + @api.marshal_with(study_eligibility) def get(self, study_id: int): study_ = Study.query.get(study_id) study_eligibility_ = study_.study_eligibility diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 061db974..7956ef42 100644 --- a/apis/study_metadata/study_identification.py +++ 
b/apis/study_metadata/study_identification.py @@ -11,21 +11,21 @@ { "id": fields.String(required=True), "identifier": fields.String(required=True), - "identifier_type": fields.Boolean(required=True), + "identifier_type": fields.String(required=True), "identifier_domain": fields.String(required=True), "identifier_link": fields.String(required=True), - "secondary": fields.Boolean(required=True), + "secondary": fields.Boolean(required=True) }, ) @api.route("/study//metadata/identification") -class StudyArmResource(Resource): +class StudyIdentificationResource(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - # @api.marshal_with(study_identification) + @api.marshal_with(study_identification) def get(self, study_id: int): study_ = Study.query.get(study_id) study_identification_ = study_.study_identification From 90cc516cf0f99025348ab79aa7e1368fed5f94df Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 22 Aug 2023 01:01:02 +0000 Subject: [PATCH 031/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 14 ----------- apis/contributor.py | 1 - apis/participant.py | 1 - apis/study_metadata/study_arm.py | 4 +-- apis/study_metadata/study_available_ipd.py | 2 -- apis/study_metadata/study_contact.py | 3 --- apis/study_metadata/study_description.py | 3 --- apis/study_metadata/study_design.py | 2 -- apis/study_metadata/study_eligibility.py | 3 --- apis/study_metadata/study_identification.py | 4 +-- model/dataset.py | 27 +++++++++++++++------ model/study_metadata/study_available_ipd.py | 1 + 12 files changed, 23 insertions(+), 42 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 61d9308b..4c8a30a5 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -25,13 +25,6 @@ from .study_metadata.study_status import 
api as status - - - - - - - api = Api( title="FAIRHUB", description="The backend api system for the fairhub vue app", @@ -71,10 +64,3 @@ def get(self): # api.add_namespace(reference) # api.add_namespace(sponsors_collaborator) # api.add_namespace(status) - - - - - - - diff --git a/apis/contributor.py b/apis/contributor.py index cefb6cd0..622d53e1 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -11,7 +11,6 @@ "user_id": fields.String(required=True), "permission": fields.String(required=True), "study_id": fields.String(required=True), - }, ) diff --git a/apis/participant.py b/apis/participant.py index 39b0517d..36c17f41 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -15,7 +15,6 @@ "updated_on": fields.String(required=True), "address": fields.String(required=True), "age": fields.String(required=True), - }, ) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index dc6edd32..15fdc3bb 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -13,10 +13,10 @@ "type": fields.String(required=True), "description": fields.String(required=True), "intervention_list": fields.List(fields.String, required=True), - }, ) + @api.route("/study//metadata/arm") class StudyArm(Resource): @api.doc("list_study") @@ -28,5 +28,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_arm_ = study_.study_arm return [s.to_dict() for s in study_arm_] - - diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 93b84d6b..70191530 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -27,5 +27,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_available_ = study_.study_available_ipd return [s.to_dict() for s in study_available_] - - diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 764d043f..c25d4ec7 100644 --- 
a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -17,7 +17,6 @@ "phone_ext": fields.String(required=True), "email_address": fields.String(required=True), "central_contact": fields.Boolean(required=True), - }, ) @@ -33,5 +32,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_contact_ = study_.study_contact return [s.to_dict() for s in study_contact_] - - diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 2b5543bd..1b5a6271 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -12,7 +12,6 @@ "id": fields.String(required=True), "brief_summary": fields.String(required=True), "detailed_description": fields.String(required=True), - }, ) @@ -28,5 +27,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_description_ = study_.study_description return [s.to_dict() for s in study_description_] - - diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index d8b0046c..8f743b2d 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -41,5 +41,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_design_ = study_.study_design return [s.to_dict() for s in study_design_] - - diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 6aec03d1..79956be8 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -20,7 +20,6 @@ "exclusion_criteria": fields.List(fields.String, required=True), "study_population": fields.String(required=True), "sampling_method": fields.String(required=True), - }, ) @@ -36,5 +35,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_eligibility_ = study_.study_eligibility return [s.to_dict() for s in study_eligibility_] - - diff --git 
a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 7956ef42..fa82369d 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -14,7 +14,7 @@ "identifier_type": fields.String(required=True), "identifier_domain": fields.String(required=True), "identifier_link": fields.String(required=True), - "secondary": fields.Boolean(required=True) + "secondary": fields.Boolean(required=True), }, ) @@ -30,5 +30,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_identification_ = study_.study_identification return [s.to_dict() for s in study_identification_] - - diff --git a/model/dataset.py b/model/dataset.py index 8899299e..cc6c1695 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -13,6 +13,7 @@ def __init__(self, study): self.study = study self.id = str(uuid.uuid4()) self.created_at = datetime.now() + __tablename__ = "dataset" id = db.Column(db.CHAR(36), primary_key=True) updated_on = db.Column(db.DateTime, nullable=False) @@ -21,22 +22,34 @@ def __init__(self, study): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="dataset") - dataset_contributors = db.relationship("DatasetContributor", back_populates="dataset") - dataset_versions = db.relationship("DatasetVersion", back_populates="dataset", lazy="dynamic") + dataset_contributors = db.relationship( + "DatasetContributor", back_populates="dataset" + ) + dataset_versions = db.relationship( + "DatasetVersion", back_populates="dataset", lazy="dynamic" + ) dataset_access = db.relationship("DatasetAccess", back_populates="dataset") dataset_consent = db.relationship("DatasetConsent", back_populates="dataset") dataset_date = db.relationship("DatasetDate", back_populates="dataset") - dataset_de_ident_level = db.relationship("DatasetDeIdentLevel", back_populates="dataset") - dataset_description = db.relationship("DatasetDescription", 
back_populates="dataset") + dataset_de_ident_level = db.relationship( + "DatasetDeIdentLevel", back_populates="dataset" + ) + dataset_description = db.relationship( + "DatasetDescription", back_populates="dataset" + ) dataset_funder = db.relationship("DatasetFunder", back_populates="dataset") dataset_identifier = db.relationship("DatasetIdentifier", back_populates="dataset") - dataset_managing_organization = db.relationship("DatasetManagingOrganization", back_populates="dataset") + dataset_managing_organization = db.relationship( + "DatasetManagingOrganization", back_populates="dataset" + ) dataset_other = db.relationship("DatasetOther", back_populates="dataset") dataset_readme = db.relationship("DatasetReadme", back_populates="dataset") dataset_record_keys = db.relationship("DatasetRecordKeys", back_populates="dataset") - dataset_related_item = db.relationship("DatasetRelatedItem", back_populates="dataset") + dataset_related_item = db.relationship( + "DatasetRelatedItem", back_populates="dataset" + ) dataset_rights = db.relationship("DatasetRights", back_populates="dataset") dataset_subject = db.relationship("DatasetSubject", back_populates="dataset") dataset_title = db.relationship("DatasetTitle", back_populates="dataset") @@ -50,7 +63,7 @@ def to_dict(self): "updated_on": str(datetime.now()), "created_at": str(datetime.now()), # "dataset_versions": [i.to_dict() for i in self.dataset_versions], - "latest_version": last_published.id if last_published else None + "latest_version": last_published.id if last_published else None, } def last_published(self): diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 1f2efca6..a56abadf 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -8,6 +8,7 @@ class StudyAvailableIpd(db.Model): def __init__(self): self.id = str(uuid.uuid4()) + __tablename__ = "study_available_ipd" id = db.Column(db.CHAR(36), 
primary_key=True) From c66d803f224d447aa8a1573bd6b4bf78b23b94f6 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 21 Aug 2023 18:08:51 -0700 Subject: [PATCH 032/505] fix: metadata location and link --- apis/study_metadata/study_intervention.py | 36 +++++++++++++++++++++++ apis/study_metadata/study_ipdsharing.py | 35 ++++++++++++++++++++++ apis/study_metadata/study_link.py | 32 ++++++++++++++++++++ apis/study_metadata/study_location.py | 35 ++++++++++++++++++++++ 4 files changed, 138 insertions(+) create mode 100644 apis/study_metadata/study_intervention.py create mode 100644 apis/study_metadata/study_ipdsharing.py create mode 100644 apis/study_metadata/study_link.py create mode 100644 apis/study_metadata/study_location.py diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py new file mode 100644 index 00000000..b7df4bde --- /dev/null +++ b/apis/study_metadata/study_intervention.py @@ -0,0 +1,36 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("intervention", description="study operations", path="/") + + +study_intervention = api.model( + "StudyIntervention", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "name": fields.String(required=True), + "description": fields.String(required=True), + "arm_group_label_list": fields.List(fields.String, required=True), + "other_name_list": fields.List(fields.String, required=True) + + + }, +) + + +@api.route("/study//metadata/intervention") +class StudyInterventionResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_intervention) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_intervention_ = study_.study_intervention + return [s.to_dict() for s in study_intervention_] + + diff --git a/apis/study_metadata/study_ipdsharing.py 
b/apis/study_metadata/study_ipdsharing.py new file mode 100644 index 00000000..3e0cbb28 --- /dev/null +++ b/apis/study_metadata/study_ipdsharing.py @@ -0,0 +1,35 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("ipdsharing", description="study operations", path="/") + + +study_ipdsharing = api.model( + "StudyIpdsharing", + { + "id": fields.String(required=True), + "ipd_sharing": fields.String(required=True), + "ipd_sharing_description": fields.String(required=True), + "ipd_sharing_info_type_list": fields.List(fields.String, required=True), + "ipd_sharing_time_frame": fields.String(required=True), + "ipd_sharing_access_criteria": fields.String(required=True), + "ipd_sharing_url": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/ipdsharing") +class StudyIpdsharingResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_ipdsharing) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_ipdsharing_ = study_.study_ipdsharing + return [s.to_dict() for s in study_ipdsharing_] + + diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py new file mode 100644 index 00000000..101b9f92 --- /dev/null +++ b/apis/study_metadata/study_link.py @@ -0,0 +1,32 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("link", description="study operations", path="/") + + +study_link = api.model( + "StudyLink", + { + "id": fields.String(required=True), + "url": fields.String(required=True), + "title": fields.String(required=True), + + }, +) + + +@api.route("/study//metadata/link") +class StudyLinkResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_link) 
+ def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_link_ = study_.study_link + return [s.to_dict() for s in study_link_] + + diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py new file mode 100644 index 00000000..e7b8a8c2 --- /dev/null +++ b/apis/study_metadata/study_location.py @@ -0,0 +1,35 @@ +from model import Study + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("location", description="study operations", path="/") + + +study_location = api.model( + "StudyLocation", + { + "id": fields.String(required=True), + "facility": fields.String(required=True), + "status": fields.String(required=True), + "city": fields.String(required=True), + "state": fields.String(required=True), + "zip": fields.String(required=True), + "country": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/location") +class StudyLocationResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_location) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_location_ = study_.study_location + return [s.to_dict() for s in study_location_] + + From 9c32d54792517a7a70ec00c7645150c5e4b92811 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 21 Aug 2023 19:06:08 -0700 Subject: [PATCH 033/505] fix: study metadata --- apis/__init__.py | 10 +++--- apis/study_metadata/study_other.py | 31 ++++++++++++++++ apis/study_metadata/study_overall_official.py | 31 ++++++++++++++++ apis/study_metadata/study_reference.py | 31 ++++++++++++++++ .../study_sponsors_collaborators.py | 35 +++++++++++++++++++ apis/study_metadata/study_status.py | 33 +++++++++++++++++ 6 files changed, 166 insertions(+), 5 deletions(-) create mode 100644 apis/study_metadata/study_other.py create mode 100644 apis/study_metadata/study_overall_official.py create mode 100644 
apis/study_metadata/study_reference.py create mode 100644 apis/study_metadata/study_sponsors_collaborators.py create mode 100644 apis/study_metadata/study_status.py diff --git a/apis/__init__.py b/apis/__init__.py index 61d9308b..f438f045 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -66,11 +66,11 @@ def get(self): api.add_namespace(ipdsharing) api.add_namespace(link) api.add_namespace(location) -# api.add_namespace(other) -# api.add_namespace(overall_official) -# api.add_namespace(reference) -# api.add_namespace(sponsors_collaborator) -# api.add_namespace(status) +api.add_namespace(other) +api.add_namespace(overall_official) +api.add_namespace(reference) +api.add_namespace(sponsors_collaborator) +api.add_namespace(status) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py new file mode 100644 index 00000000..327bd377 --- /dev/null +++ b/apis/study_metadata/study_other.py @@ -0,0 +1,31 @@ +from flask_restx import Namespace, Resource, fields +from model import Study + +api = Namespace("other", description="study operations", path="/") + + +study_other = api.model( + "StudyOther", + { + "id": fields.String(required=True), + "oversight_has_dmc": fields.String(required=True), + "conditions": fields.String(required=True), + "keywords": fields.String(required=True), + "size": fields.Integer(required=True) + }, +) + + +@api.route("/study//metadata/other") +class StudyOtherResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_other) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_other_ = study_.study_other + return [s.to_dict() for s in study_other_] + + diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py new file mode 100644 index 00000000..bc84fe80 --- /dev/null +++ 
b/apis/study_metadata/study_overall_official.py @@ -0,0 +1,31 @@ +from flask_restx import Namespace, Resource, fields +from model import Study + +api = Namespace("overall_official", description="study operations", path="/") + + +study_overall_official = api.model( + "StudyOverallOfficial", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/overall_official") +class StudyOverallOfficialResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_overall_official) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_overall_official_ = study_.study_overall_official + return [s.to_dict() for s in study_overall_official_] + + diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py new file mode 100644 index 00000000..cc15a775 --- /dev/null +++ b/apis/study_metadata/study_reference.py @@ -0,0 +1,31 @@ +from flask_restx import Namespace, Resource, fields +from model import Study + +api = Namespace("reference", description="study operations", path="/") + + +study_reference = api.model( + "StudyReference", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "type": fields.Boolean(required=True), + "title": fields.String(required=True), + "citation": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/reference") +class StudyReferenceResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_reference) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_reference_ = 
study_.study_reference + return [s.to_dict() for s in study_reference_] + + diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py new file mode 100644 index 00000000..eeae4cf6 --- /dev/null +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -0,0 +1,35 @@ +from flask_restx import Namespace, Resource, fields +from model import Study + +api = Namespace("sponsors_collaborators", description="study operations", path="/") + + +study_sponsors_collaborators = api.model( + "StudySponsorsCollaborators", + { + "id": fields.String(required=True), + "responsible_party_type": fields.String(required=True), + "responsible_party_investigator_first_name": fields.String(required=True), + "responsible_party_investigator_last_name": fields.String(required=True), + "responsible_party_investigator_title": fields.String(required=True), + "responsible_party_investigator_affiliation": fields.String(required=True), + "lead_sponsor_first_name": fields.String(required=True), + "lead_sponsor_last_name": fields.String(required=True), + "collaborator_name": fields.List(fields.String, required=True), + }, +) + + +@api.route("/study//metadata/sponsors_collaborators") +class StudyStatusResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_sponsors_collaborators) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_sponsors_collaborators_ = study_.sponsors_collaborators + return [s.to_dict() for s in study_sponsors_collaborators_] + + diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py new file mode 100644 index 00000000..7df6b686 --- /dev/null +++ b/apis/study_metadata/study_status.py @@ -0,0 +1,33 @@ +from flask_restx import Namespace, Resource, fields +from model import Study + +api = Namespace("status", description="study 
operations", path="/") + + +study_status = api.model( + "StudyStatus", + { + "id": fields.String(required=True), + "overall_status": fields.String(required=True), + "why_stopped": fields.String(required=True), + "start_date": fields.String(required=True), + "start_date_type": fields.String(required=True), + "completion_date": fields.String(required=True), + "completion_date_type": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/status") +class StudyStatusResource(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + @api.marshal_with(study_status) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_status_ = study_.study_status + return [s.to_dict() for s in study_status_] + + From e1e316494d340a199959651de6638495a47acb35 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 22 Aug 2023 02:07:25 +0000 Subject: [PATCH 034/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_intervention.py | 6 +----- apis/study_metadata/study_ipdsharing.py | 2 -- apis/study_metadata/study_link.py | 3 --- apis/study_metadata/study_location.py | 2 -- apis/study_metadata/study_other.py | 4 +--- apis/study_metadata/study_overall_official.py | 2 -- apis/study_metadata/study_reference.py | 2 -- apis/study_metadata/study_sponsors_collaborators.py | 2 -- apis/study_metadata/study_status.py | 2 -- 9 files changed, 2 insertions(+), 23 deletions(-) diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index b7df4bde..a03658f5 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -14,9 +14,7 @@ "name": fields.String(required=True), "description": fields.String(required=True), 
"arm_group_label_list": fields.List(fields.String, required=True), - "other_name_list": fields.List(fields.String, required=True) - - + "other_name_list": fields.List(fields.String, required=True), }, ) @@ -32,5 +30,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_intervention_ = study_.study_intervention return [s.to_dict() for s in study_intervention_] - - diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 3e0cbb28..d4557ba8 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -31,5 +31,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_ipdsharing_ = study_.study_ipdsharing return [s.to_dict() for s in study_ipdsharing_] - - diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 101b9f92..53f25982 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -12,7 +12,6 @@ "id": fields.String(required=True), "url": fields.String(required=True), "title": fields.String(required=True), - }, ) @@ -28,5 +27,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_link_ = study_.study_link return [s.to_dict() for s in study_link_] - - diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index e7b8a8c2..eb6a0f7b 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -31,5 +31,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_location_ = study_.study_location return [s.to_dict() for s in study_location_] - - diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 327bd377..9bc633e1 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -11,7 +11,7 @@ "oversight_has_dmc": fields.String(required=True), "conditions": fields.String(required=True), "keywords": 
fields.String(required=True), - "size": fields.Integer(required=True) + "size": fields.Integer(required=True), }, ) @@ -27,5 +27,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_other_ = study_.study_other return [s.to_dict() for s in study_other_] - - diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index bc84fe80..efe11efe 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -27,5 +27,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_overall_official_ = study_.study_overall_official return [s.to_dict() for s in study_overall_official_] - - diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index cc15a775..efc846f2 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -27,5 +27,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_reference_ = study_.study_reference return [s.to_dict() for s in study_reference_] - - diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index eeae4cf6..dbadbc02 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -31,5 +31,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_sponsors_collaborators_ = study_.sponsors_collaborators return [s.to_dict() for s in study_sponsors_collaborators_] - - diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 7df6b686..79d53d2d 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -29,5 +29,3 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_status_ = study_.study_status return [s.to_dict() for s in study_status_] - - From 
7bc357ab288ce7c6a2b485b8fc7427cfa1d22daa Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 21 Aug 2023 19:28:49 -0700 Subject: [PATCH 035/505] feat: added dataset metadata namespaces --- apis/__init__.py | 42 +++++++++++++++++++ apis/dataset_metadata/dataset_consent.py | 34 +++++++++++++++ .../dataset_contributor_affiliation.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_date.py | 34 +++++++++++++++ .../dataset_de_ident_level.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_description.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_funder.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_identifier.py | 34 +++++++++++++++ .../dataset_managing_organization.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_other.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_readme.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_record_keys.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_related_item.py | 34 +++++++++++++++ .../dataset_related_item_contributor.py | 34 +++++++++++++++ .../dataset_related_item_identifier.py | 34 +++++++++++++++ .../dataset_related_item_other.py | 34 +++++++++++++++ .../dataset_related_item_title.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_rights.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_subject.py | 34 +++++++++++++++ apis/dataset_metadata/dataset_title.py | 34 +++++++++++++++ apis/study_metadata/study_design.py | 4 +- .../study_sponsors_collaborators.py | 2 +- 22 files changed, 691 insertions(+), 3 deletions(-) create mode 100644 apis/dataset_metadata/dataset_consent.py create mode 100644 apis/dataset_metadata/dataset_contributor_affiliation.py create mode 100644 apis/dataset_metadata/dataset_date.py create mode 100644 apis/dataset_metadata/dataset_de_ident_level.py create mode 100644 apis/dataset_metadata/dataset_description.py create mode 100644 apis/dataset_metadata/dataset_funder.py create mode 100644 apis/dataset_metadata/dataset_identifier.py create mode 100644 
apis/dataset_metadata/dataset_managing_organization.py create mode 100644 apis/dataset_metadata/dataset_other.py create mode 100644 apis/dataset_metadata/dataset_readme.py create mode 100644 apis/dataset_metadata/dataset_record_keys.py create mode 100644 apis/dataset_metadata/dataset_related_item.py create mode 100644 apis/dataset_metadata/dataset_related_item_contributor.py create mode 100644 apis/dataset_metadata/dataset_related_item_identifier.py create mode 100644 apis/dataset_metadata/dataset_related_item_other.py create mode 100644 apis/dataset_metadata/dataset_related_item_title.py create mode 100644 apis/dataset_metadata/dataset_rights.py create mode 100644 apis/dataset_metadata/dataset_subject.py create mode 100644 apis/dataset_metadata/dataset_title.py diff --git a/apis/__init__.py b/apis/__init__.py index 92100319..ec8f88cd 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -25,6 +25,27 @@ from .study_metadata.study_status import api as status +from .dataset_metadata.dataset_consent import api as dataset_consent +from .dataset_metadata.dataset_subject import api as subject +from .dataset_metadata.dataset_description import api as description +from .dataset_metadata.dataset_identifier import api as identifier +from .dataset_metadata.dataset_other import api as other +from .dataset_metadata.dataset_contributor_affiliation import api as contributor_affiliation +from .dataset_metadata.dataset_date import api as date +from .dataset_metadata.dataset_de_ident_level import api as de_ident_level +from .dataset_metadata.dataset_subject import api as subject +from .dataset_metadata.dataset_managing_organization import api as managing_organization +from .dataset_metadata.dataset_readme import api as readme +from .dataset_metadata.dataset_record_keys import api as record_keys +from .dataset_metadata.dataset_rights import api as rights +from .dataset_metadata.dataset_title import api as title +from .dataset_metadata.dataset_related_item import api as related_item 
+from .dataset_metadata.dataset_related_item_title import api as related_item_title +from .dataset_metadata.dataset_related_item_contributor import api as related_item_contributor +from .dataset_metadata.dataset_related_item_identifier import api as related_item_identifier +from .dataset_metadata.dataset_related_item_other import api as related_item_other + + api = Api( title="FAIRHUB", description="The backend api system for the fairhub vue app", @@ -64,3 +85,24 @@ def get(self): api.add_namespace(reference) api.add_namespace(sponsors_collaborator) api.add_namespace(status) + + +api.add_namespace(dataset_consent) +api.add_namespace(subject) +api.add_namespace(description) +api.add_namespace(identifier) +api.add_namespace(other) +# api.add_namespace(contributor_affiliation) +# api.add_namespace(date) +# api.add_namespace(de_ident_level) +# api.add_namespace(subject) +# api.add_namespace(managing_organization) +# api.add_namespace(readme) +# api.add_namespace(record_keys) +# api.add_namespace(rights) +# api.add_namespace(title) +# api.add_namespace(related_item) +# api.add_namespace(related_item_title) +# api.add_namespace(related_item_contributor) +# api.add_namespace(related_item_identifier) +# api.add_namespace(related_item_other) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_consent.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + 
"email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_contributor_affiliation.py b/apis/dataset_metadata/dataset_contributor_affiliation.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_contributor_affiliation.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py new file mode 100644 index 
00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_date.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": 
fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_description.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_funder.py @@ -0,0 +1,34 @@ +from model import 
Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_identifier.py b/apis/dataset_metadata/dataset_identifier.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_identifier.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + 
@api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_other.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset 
operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_readme.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + 
@api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_related_item.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": 
fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = 
Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_related_item_other.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": 
fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_related_item_title.py b/apis/dataset_metadata/dataset_related_item_title.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_related_item_title.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = 
Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_rights.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_subject.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": 
fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py new file mode 100644 index 00000000..5e8e20dc --- /dev/null +++ b/apis/dataset_metadata/dataset_title.py @@ -0,0 +1,34 @@ +from model import Dataset + +from flask_restx import Namespace, Resource, fields + + +api = Namespace("consent", description="dataset operations", path="/") + +dataset_consent = api.model( + "StudyContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "affiliation": fields.String(required=True), + "role": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": fields.String(required=True), + "email_address": fields.String(required=True), + "central_contact": fields.Boolean(required=True), + }, +) + + +@api.route("/study//metadata/contact") +class StudyContactResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_consent) + def get(self, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + return [d.to_dict() for s in dataset_consent_] diff --git 
a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 8f743b2d..097781d8 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -25,7 +25,7 @@ "bio_spec_retention": fields.String(required=True), "bio_spec_description": fields.String(required=True), "target_duration": fields.String(required=True), - "number_groups_cohorts": fields.Integer(required=True), + "number_groups_cohorts": fields.Integer(required=True) }, ) @@ -36,7 +36,7 @@ class StudyDesignResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - @api.marshal_with(study_design) + #@api.marshal_with(study_design) def get(self, study_id: int): study_ = Study.query.get(study_id) study_design_ = study_.study_design diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index eeae4cf6..28658764 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -29,7 +29,7 @@ class StudyStatusResource(Resource): @api.marshal_with(study_sponsors_collaborators) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_sponsors_collaborators_ = study_.sponsors_collaborators + study_sponsors_collaborators_ = study_.study_sponsors_collaborators return [s.to_dict() for s in study_sponsors_collaborators_] From 5376891ace32fee924110835706aa3f22b723ac8 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 24 Aug 2023 09:39:45 -0700 Subject: [PATCH 036/505] feat: added dataset metadata GET functions --- apis/__init__.py | 24 ++++------ apis/dataset_metadata/dataset_consent.py | 36 ++++++++------ .../dataset_contributor_affiliation.py | 34 ------------- apis/dataset_metadata/dataset_date.py | 30 +++++------- .../dataset_de_ident_level.py | 33 +++++++------ apis/dataset_metadata/dataset_description.py | 29 +++++------ 
apis/dataset_metadata/dataset_funder.py | 34 ++++++------- apis/dataset_metadata/dataset_identifier.py | 28 +++++------ .../dataset_managing_organization.py | 29 +++++------ apis/dataset_metadata/dataset_other.py | 32 ++++++------- apis/dataset_metadata/dataset_readme.py | 28 +++++------ apis/dataset_metadata/dataset_record_keys.py | 48 ++++++++++++------- apis/dataset_metadata/dataset_related_item.py | 29 +++++------ .../dataset_related_item_contributor.py | 37 +++++++------- .../dataset_related_item_identifier.py | 37 +++++++------- .../dataset_related_item_other.py | 37 +++++++------- .../dataset_related_item_title.py | 37 +++++++------- apis/dataset_metadata/dataset_rights.py | 31 ++++++------ apis/dataset_metadata/dataset_subject.py | 31 ++++++------ apis/dataset_metadata/dataset_title.py | 10 ++-- model/dataset_metadata/dataset_consent.py | 27 ++++++----- model/dataset_metadata/dataset_readme.py | 2 +- model/dataset_metadata/dataset_record_keys.py | 16 ++++--- 23 files changed, 303 insertions(+), 376 deletions(-) delete mode 100644 apis/dataset_metadata/dataset_contributor_affiliation.py diff --git a/apis/__init__.py b/apis/__init__.py index ec8f88cd..46819fdb 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -29,11 +29,9 @@ from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_identifier import api as identifier -from .dataset_metadata.dataset_other import api as other -from .dataset_metadata.dataset_contributor_affiliation import api as contributor_affiliation +from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_date import api as date from .dataset_metadata.dataset_de_ident_level import api as de_ident_level -from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_managing_organization import api as managing_organization from .dataset_metadata.dataset_readme import 
api as readme from .dataset_metadata.dataset_record_keys import api as record_keys @@ -91,17 +89,15 @@ def get(self): api.add_namespace(subject) api.add_namespace(description) api.add_namespace(identifier) -api.add_namespace(other) -# api.add_namespace(contributor_affiliation) -# api.add_namespace(date) -# api.add_namespace(de_ident_level) -# api.add_namespace(subject) -# api.add_namespace(managing_organization) -# api.add_namespace(readme) -# api.add_namespace(record_keys) -# api.add_namespace(rights) -# api.add_namespace(title) -# api.add_namespace(related_item) +api.add_namespace(dataset_other) +api.add_namespace(date) +api.add_namespace(de_ident_level) +api.add_namespace(managing_organization) +api.add_namespace(readme) +api.add_namespace(record_keys) +api.add_namespace(rights) +api.add_namespace(title) +api.add_namespace(related_item) # api.add_namespace(related_item_title) # api.add_namespace(related_item_contributor) # api.add_namespace(related_item_identifier) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 5e8e20dc..6ab5266b 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,34 +1,42 @@ -from model import Dataset +from model import Dataset, DatasetConsent, db from flask_restx import Namespace, Resource, fields +from flask import jsonify, request api = Namespace("consent", description="dataset operations", path="/") dataset_consent = api.model( - "StudyContact", + "DatasetConsent", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "type": fields.String(required=True), + "noncommercial": 
fields.Boolean(required=True), + "geog_restrict": fields.Boolean(required=True), + "research_type": fields.Boolean(required=True), + "genetic_only": fields.Boolean(required=True), + "no_methods": fields.Boolean(required=True), + "details": fields.String(required=True), }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/consent") +class DatasetConsentResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + return [d.to_dict() for d in dataset_consent_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_consent_ = DatasetConsent.from_data(data_obj, data) + db.session.add(dataset_consent_) + db.session.commit() + return jsonify(dataset_consent_.to_dict()) \ No newline at end of file diff --git a/apis/dataset_metadata/dataset_contributor_affiliation.py b/apis/dataset_metadata/dataset_contributor_affiliation.py deleted file mode 100644 index 5e8e20dc..00000000 --- a/apis/dataset_metadata/dataset_contributor_affiliation.py +++ /dev/null @@ -1,34 +0,0 @@ -from model import Dataset - -from flask_restx import Namespace, Resource, fields - - -api = Namespace("consent", description="dataset operations", path="/") - -dataset_consent = api.model( - "StudyContact", - { - "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": 
fields.String(required=True), - "central_contact": fields.Boolean(required=True), - }, -) - - -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): - @api.doc("dataset") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 5e8e20dc..781d8a70 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -3,32 +3,28 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("date", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_date = api.model( + "DatasetDate", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "date": fields.String(required=True), + "date_type": fields.String(required=True), + "data_information": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/date") +class DatasetDateResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_date) + def 
get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_date_ = dataset_.dataset_date + return [d.to_dict() for d in dataset_date_] diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 5e8e20dc..aae6fc6e 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -3,32 +3,31 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("date", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +de_ident_level = api.model( + "DatasetDeIdentLevel", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "type": fields.String(required=True), + "direct": fields.Boolean(required=True), + "hipaa": fields.Boolean(required=True), + "dates": fields.Boolean(required=True), + "nonarr": fields.Boolean(required=True), + "k_anon": fields.Boolean(required=True), + "details-": fields.String(required=True), }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/de_ident_level") +class DatasetDeIdentLevelResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(de_ident_level) + def get(self, study_id: int, dataset_id: 
int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + de_ident_level_ = dataset_.dataset_de_ident_level + return [d.to_dict() for d in de_ident_level_] diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 5e8e20dc..f72cb0fe 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -3,32 +3,27 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("description", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_description = api.model( + "DatasetDescription", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "description": fields.String(required=True), + "description_type": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/description") +class DatasetDescriptionResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_description) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_description_ = dataset_.dataset_description + return [d.to_dict() 
for d in dataset_description_] diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 5e8e20dc..b50eca65 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -3,32 +3,32 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("description", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_funder = api.model( + "DatasetFunder", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + "award_number": fields.String(required=True), + "award_uri": fields.String(required=True), + "award_title": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/funder") +class DatasetFunderResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_funder) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_funder_ = dataset_.dataset_funder + return [d.to_dict() for d in dataset_funder_] diff 
--git a/apis/dataset_metadata/dataset_identifier.py b/apis/dataset_metadata/dataset_identifier.py index 5e8e20dc..bef25d78 100644 --- a/apis/dataset_metadata/dataset_identifier.py +++ b/apis/dataset_metadata/dataset_identifier.py @@ -3,32 +3,26 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("description", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_description = api.model( + "DatasetDescription", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "description": fields.String(required=True), + "description_type": fields.String(required=True), }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/description") +class DatasetDescriptionResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_description) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_description_ = dataset_.dataset_description + return [d.to_dict() for d in dataset_description_] diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 5e8e20dc..8f5991db 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ 
b/apis/dataset_metadata/dataset_managing_organization.py @@ -3,32 +3,27 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("managing_organization", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +managing_organization = api.model( + "DatasetManagingOrganization", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "name": fields.String(required=True), + "ror_id": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/managing_organization") +class DatasetManagingOrganizationResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(managing_organization) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + managing_organization_ = dataset_.dataset_managing_organization + return [d.to_dict() for d in managing_organization_] diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 5e8e20dc..d17a0eb9 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -3,32 +3,30 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", 
path="/") +api = Namespace("dataset_other", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_other = api.model( + "DatasetOther", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "language": fields.String(required=True), + "managing_organization_name": fields.String(required=True), + "managing_organization_ror_id": fields.String(required=True), + "size": fields.List(fields.String, required=True), + "standards_followed": fields.String(required=True), + "acknowledgement": fields.String(required=True), }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/other") +class DatasetOtherResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_other) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_other_ = dataset_.dataset_other + return [d.to_dict() for d in dataset_other_] diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index 5e8e20dc..d56054c2 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -3,32 +3,26 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("readme", 
description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_readme = api.model( + "DatasetReadme", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "content": fields.Boolean(required=True) + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/readme") +class DatasetDateResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_readme) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_readme_ = dataset_.dataset_readme + return [d.to_dict() for d in dataset_readme_] diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 5e8e20dc..9e8e26df 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -1,34 +1,46 @@ -from model import Dataset +from model import Dataset, DatasetRecordKeys, db from flask_restx import Namespace, Resource, fields +from flask import jsonify, request -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("record_keys", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_record_keys = api.model( + "DatasetRecordKeys", { "id": fields.String(required=True), - 
"first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "key_type": fields.String(required=True), + "key_details": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/record_keys") +class DatasetRecordKeysResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_record_keys) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_record_keys_ = dataset_.dataset_record_keys + return [d.to_dict() for d in dataset_record_keys_] + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_request_keys_ = DatasetRecordKeys.from_data(data_obj, data) + db.session.add(dataset_request_keys_) + db.session.commit() + return dataset_request_keys_.to_dict() + + @api.route("/study//dataset//metadata/record_keys/") + class DatasetRecordKeysUpdate(Resource): + def put(self, study_id: int, dataset_id: int, record_key_id: int): + data = request.json + dataset_request_keys_ = DatasetRecordKeys.query.get(record_key_id) + dataset_request_keys_.update(request.json) + db.session.commit() + return dataset_request_keys_.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 5e8e20dc..edd7bd91 100644 --- 
a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -3,32 +3,27 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("related_item", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_related_item = api.model( + "DatasetRelatedItem", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "type": fields.String(required=True), + "relation_type": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/related_item") +class DatasetRelatedItemResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_related_item) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_related_item_ = dataset_.dataset_related_item + return [d.to_dict() for d in dataset_related_item_] diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py index 5e8e20dc..1ba7d2e3 100644 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -3,32 +3,27 @@ from flask_restx import Namespace, Resource, 
fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("related_item_contributor", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", - { - "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), - }, -) +# dataset_related_item_contributor = api.model( +# "DatasetRelatedItemContributor", +# { +# "id": fields.String(required=True), +# "type": fields.String(required=True), +# "relation_type": fields.String(required=True), +# +# }, +# ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/related_item_identifier") +class DatasetRelatedItemContributorResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + # @api.marshal_with(dataset_related_item_contributor) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_related_item_contributor_ = dataset_.dataset_related_item_contributor + return [d.to_dict() for d in dataset_related_item_contributor_] diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py index 5e8e20dc..a4801c35 100644 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -3,32 +3,27 @@ from flask_restx import Namespace, 
Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("related_item", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", - { - "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), - }, -) +# dataset_related_item_contributor = api.model( +# "DatasetRelatedItemContributor", +# { +# "id": fields.String(required=True), +# "type": fields.String(required=True), +# "relation_type": fields.String(required=True), +# +# }, +# ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/related_item_contributor") +class DatasetRelatedItemContributorResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + # @api.marshal_with(dataset_related_item_contributor) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_related_item_ = dataset_.dataset_related_item + return [d.to_dict() for d in dataset_related_item_] diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py index 5e8e20dc..c795dce5 100644 --- a/apis/dataset_metadata/dataset_related_item_other.py +++ b/apis/dataset_metadata/dataset_related_item_other.py @@ -3,32 +3,27 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", 
description="dataset operations", path="/") +api = Namespace("related_item_other", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", - { - "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), - }, -) +# dataset_related_item_contributor = api.model( +# "DatasetRelatedItemContributor", +# { +# "id": fields.String(required=True), +# "type": fields.String(required=True), +# "relation_type": fields.String(required=True), +# +# }, +# ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/related_item_other") +class DatasetRelatedItemContributorResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + # @api.marshal_with(dataset_related_item_contributor) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_related_item_ = dataset_.dataset_related_item + return [d.to_dict() for d in dataset_related_item_] diff --git a/apis/dataset_metadata/dataset_related_item_title.py b/apis/dataset_metadata/dataset_related_item_title.py index 5e8e20dc..3ac570f8 100644 --- a/apis/dataset_metadata/dataset_related_item_title.py +++ b/apis/dataset_metadata/dataset_related_item_title.py @@ -3,32 +3,27 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = 
Namespace("related_item_title", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", - { - "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), - }, -) +# dataset_related_item_contributor = api.model( +# "DatasetRelatedItemTitle", +# { +# "id": fields.String(required=True), +# "type": fields.String(required=True), +# "relation_type": fields.String(required=True), +# +# }, +# ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/related_item_title") +class DatasetRelatedItemTitleResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + # @api.marshal_with(dataset_related_item_contributor) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_related_item_title_ = dataset_.dataset_related_item_title + return [d.to_dict() for d in dataset_related_item_title_] diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 5e8e20dc..be868d59 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -3,32 +3,29 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("dataset_rights", description="dataset operations", path="/") -dataset_consent = api.model( 
- "StudyContact", +dataset_rights = api.model( + "DatasetRights", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "rights": fields.String(required=True), + "uri": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_scheme": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/rights") +class DatasetRightsResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_rights) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_rights_ = dataset_.dataset_rights + return [d.to_dict() for d in dataset_rights_] diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 5e8e20dc..aa8cad52 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -3,32 +3,29 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("description", description="dataset operations", path="/") -dataset_consent = api.model( - "StudyContact", +dataset_subject = api.model( + "DatasetSubject", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": 
fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "subject": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "value_uri": fields.String(required=True), + "classification_code": fields.String(required=True), }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/subject") +class DatasetSubjectResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) - def get(self, dataset_id: int): + @api.marshal_with(dataset_subject) + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_subject_ = dataset_.dataset_subject + return [d.to_dict() for d in dataset_subject_] diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 5e8e20dc..62f588db 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -3,9 +3,9 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("consent", description="dataset operations", path="/") +api = Namespace("title", description="dataset operations", path="/") -dataset_consent = api.model( +dataset_title = api.model( "StudyContact", { "id": fields.String(required=True), @@ -27,8 +27,8 @@ class StudyContactResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_consent) + 
@api.marshal_with(dataset_title) def get(self, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for s in dataset_consent_] + dataset_title_ = dataset_.dataset_title + return [d.to_dict() for d in dataset_title_] diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 127c0ad9..6ac8c6e7 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -3,7 +3,8 @@ class DatasetConsent(db.Model): - def __init__(self): + def __init__(self, dataset): + self.dataset = dataset self.id = str(uuid.uuid4()) __tablename__ = "dataset_consent" @@ -23,7 +24,7 @@ def __init__(self): def to_dict(self): return { "id": self.id, - "type": self.destypecription, + "type": self.type, "noncommercial": self.noncommercial, "geog_restrict": self.geog_restrict, "research_type": self.research_type, @@ -33,14 +34,16 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_consent = DatasetConsent() - dataset_consent.type = data["type"] - dataset_consent.noncommercial = data["noncommercial"] - dataset_consent.geog_restrict = data["geog_restrict"] - dataset_consent.research_type = data["research_type"] - dataset_consent.genetic_only = data["genetic_only"] - dataset_consent.no_methods = data["no_methods"] - dataset_consent.details = data["details"] - + def from_data(dataset, data: dict): + dataset_consent = DatasetConsent(dataset) + dataset_consent.update(data) return dataset_consent + + def update(self, data): + self.type = data["type"] + self.noncommercial = data["noncommercial"] + self.geog_restrict = data["geog_restrict"] + self.research_type = data["research_type"] + self.genetic_only = data["genetic_only"] + self.no_methods = data["no_methods"] + self.details = data["details"] diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index ccf6e41e..4c04e9f3 100644 
--- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -16,7 +16,7 @@ def __init__(self): def to_dict(self): return { "id": self.id, - "content": self.noncommercial, + "content": self.content, } @staticmethod diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 09f4ed84..c96f8cee 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -3,9 +3,9 @@ class DatasetRecordKeys(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_record_keys" id = db.Column(db.CHAR(36), primary_key=True) key_type = db.Column(db.String, nullable=False) @@ -22,9 +22,11 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_record_keys = DatasetRecordKeys() - - dataset_record_keys.key_type = data["key_type"] - dataset_record_keys.key_details = data["key_details"] + def from_data(dataset, data: dict): + dataset_record_keys = DatasetRecordKeys(dataset) + dataset_record_keys.update(data) return dataset_record_keys + + def update(self, data): + self.key_type = data["key_type"] + self.key_details = data["key_details"] From 8e602b432054771bf807b7a64f0fa568cb6d936f Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 24 Aug 2023 16:40:28 +0000 Subject: [PATCH 037/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 8 ++++++-- apis/dataset_metadata/dataset_consent.py | 2 +- apis/dataset_metadata/dataset_date.py | 1 - apis/dataset_metadata/dataset_description.py | 1 - apis/dataset_metadata/dataset_funder.py | 1 - apis/dataset_metadata/dataset_managing_organization.py | 1 - apis/dataset_metadata/dataset_readme.py | 6 +----- apis/dataset_metadata/dataset_record_keys.py | 6 ++++-- 
apis/dataset_metadata/dataset_related_item.py | 1 - apis/dataset_metadata/dataset_rights.py | 1 - apis/study_metadata/study_design.py | 4 ++-- model/dataset_metadata/dataset_record_keys.py | 1 + 12 files changed, 15 insertions(+), 18 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 46819fdb..e839a10a 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -39,8 +39,12 @@ from .dataset_metadata.dataset_title import api as title from .dataset_metadata.dataset_related_item import api as related_item from .dataset_metadata.dataset_related_item_title import api as related_item_title -from .dataset_metadata.dataset_related_item_contributor import api as related_item_contributor -from .dataset_metadata.dataset_related_item_identifier import api as related_item_identifier +from .dataset_metadata.dataset_related_item_contributor import ( + api as related_item_contributor, +) +from .dataset_metadata.dataset_related_item_identifier import ( + api as related_item_identifier, +) from .dataset_metadata.dataset_related_item_other import api as related_item_other diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 6ab5266b..7876da80 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -39,4 +39,4 @@ def post(self, study_id: int, dataset_id: int): dataset_consent_ = DatasetConsent.from_data(data_obj, data) db.session.add(dataset_consent_) db.session.commit() - return jsonify(dataset_consent_.to_dict()) \ No newline at end of file + return jsonify(dataset_consent_.to_dict()) diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 781d8a70..8b47af07 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -12,7 +12,6 @@ "date": fields.String(required=True), "date_type": fields.String(required=True), "data_information": fields.String(required=True), - }, ) diff --git 
a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index f72cb0fe..bef25d78 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -11,7 +11,6 @@ "id": fields.String(required=True), "description": fields.String(required=True), "description_type": fields.String(required=True), - }, ) diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index b50eca65..5d1b382b 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -16,7 +16,6 @@ "award_number": fields.String(required=True), "award_uri": fields.String(required=True), "award_title": fields.String(required=True), - }, ) diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 8f5991db..c5367319 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -11,7 +11,6 @@ "id": fields.String(required=True), "name": fields.String(required=True), "ror_id": fields.String(required=True), - }, ) diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index d56054c2..cb2d9f32 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -7,11 +7,7 @@ dataset_readme = api.model( "DatasetReadme", - { - "id": fields.String(required=True), - "content": fields.Boolean(required=True) - - }, + {"id": fields.String(required=True), "content": fields.Boolean(required=True)}, ) diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 9e8e26df..ebc42a71 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -12,7 +12,6 @@ "id": fields.String(required=True), "key_type": fields.String(required=True), "key_details": 
fields.String(required=True), - }, ) @@ -28,6 +27,7 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_record_keys_ = dataset_.dataset_record_keys return [d.to_dict() for d in dataset_record_keys_] + def post(self, study_id: int, dataset_id: int): data = request.json data_obj = Dataset.query.get(dataset_id) @@ -36,7 +36,9 @@ def post(self, study_id: int, dataset_id: int): db.session.commit() return dataset_request_keys_.to_dict() - @api.route("/study//dataset//metadata/record_keys/") + @api.route( + "/study//dataset//metadata/record_keys/" + ) class DatasetRecordKeysUpdate(Resource): def put(self, study_id: int, dataset_id: int, record_key_id: int): data = request.json diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index edd7bd91..dcacddce 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -11,7 +11,6 @@ "id": fields.String(required=True), "type": fields.String(required=True), "relation_type": fields.String(required=True), - }, ) diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index be868d59..a348c179 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -13,7 +13,6 @@ "uri": fields.String(required=True), "identifier": fields.String(required=True), "identifier_scheme": fields.String(required=True), - }, ) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 097781d8..f2ee2694 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -25,7 +25,7 @@ "bio_spec_retention": fields.String(required=True), "bio_spec_description": fields.String(required=True), "target_duration": fields.String(required=True), - "number_groups_cohorts": fields.Integer(required=True) + "number_groups_cohorts": fields.Integer(required=True), }, ) @@ -36,7 
+36,7 @@ class StudyDesignResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - #@api.marshal_with(study_design) + # @api.marshal_with(study_design) def get(self, study_id: int): study_ = Study.query.get(study_id) study_design_ = study_.study_design diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index c96f8cee..d84d1447 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -6,6 +6,7 @@ class DatasetRecordKeys(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_record_keys" id = db.Column(db.CHAR(36), primary_key=True) key_type = db.Column(db.String, nullable=False) From ca015d8d0ea3a24dae07006046bc6a042afe12d1 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 24 Aug 2023 15:23:01 -0700 Subject: [PATCH 038/505] feat: added POST/PUT to dataset metadata --- apis/__init__.py | 10 +++- apis/dataset_metadata/dataset_access.py | 49 +++++++++++++++++++ apis/dataset_metadata/dataset_consent.py | 10 +++- apis/dataset_metadata/dataset_date.py | 19 ++++++- .../dataset_de_ident_level.py | 22 +++++++-- apis/dataset_metadata/dataset_description.py | 19 ++++++- apis/dataset_metadata/dataset_funder.py | 21 +++++++- apis/dataset_metadata/dataset_identifier.py | 43 +++++++++++----- .../dataset_managing_organization.py | 19 ++++++- apis/dataset_metadata/dataset_other.py | 20 +++++++- apis/dataset_metadata/dataset_readme.py | 24 +++++++-- apis/dataset_metadata/dataset_record_keys.py | 13 ++--- apis/dataset_metadata/dataset_related_item.py | 20 +++++++- .../dataset_related_item_contributor.py | 4 +- .../dataset_related_item_identifier.py | 3 +- .../dataset_related_item_other.py | 4 +- .../dataset_related_item_title.py | 3 +- apis/dataset_metadata/dataset_rights.py | 20 +++++++- apis/dataset_metadata/dataset_subject.py | 20 
+++++++- apis/dataset_metadata/dataset_title.py | 38 +++++++++----- model/dataset_metadata/dataset_access.py | 22 +++++---- .../dataset_contributor_affiliation.py | 19 ++++--- model/dataset_metadata/dataset_date.py | 17 ++++--- .../dataset_de_ident_level.py | 24 ++++----- model/dataset_metadata/dataset_description.py | 17 ++++--- model/dataset_metadata/dataset_funder.py | 26 +++++----- model/dataset_metadata/dataset_identifier.py | 19 ++++--- .../dataset_managing_organization.py | 15 +++--- model/dataset_metadata/dataset_other.py | 25 ++++++---- model/dataset_metadata/dataset_readme.py | 17 ++++--- .../dataset_metadata/dataset_related_item.py | 16 +++--- .../dataset_related_item_contributor.py | 20 +++++--- .../dataset_related_item_identifier.py | 2 + .../dataset_related_item_other.py | 29 ++++++----- .../dataset_related_item_title.py | 13 +++-- model/dataset_metadata/dataset_rights.py | 19 ++++--- model/dataset_metadata/dataset_subject.py | 21 ++++---- model/dataset_metadata/dataset_title.py | 17 ++++--- 38 files changed, 523 insertions(+), 196 deletions(-) create mode 100644 apis/dataset_metadata/dataset_access.py diff --git a/apis/__init__.py b/apis/__init__.py index 46819fdb..57224934 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -24,8 +24,10 @@ from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator from .study_metadata.study_status import api as status +from .dataset_metadata_namespace import api as dataset_metadata_namespace -from .dataset_metadata.dataset_consent import api as dataset_consent +from .dataset_metadata.dataset_access import api as access +from .dataset_metadata.dataset_consent import api as consent from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_identifier import api as identifier @@ -42,6 +44,7 @@ from .dataset_metadata.dataset_related_item_contributor import api as related_item_contributor 
from .dataset_metadata.dataset_related_item_identifier import api as related_item_identifier from .dataset_metadata.dataset_related_item_other import api as related_item_other +from .dataset_metadata.dataset_funder import api as funder api = Api( @@ -60,6 +63,7 @@ def get(self): return "Server active!" +#api.add_namespace(dataset_metadata_namespace) api.add_namespace(cats_api) api.add_namespace(study_api) @@ -85,7 +89,9 @@ def get(self): api.add_namespace(status) -api.add_namespace(dataset_consent) +api.add_namespace(access) +api.add_namespace(funder) +api.add_namespace(consent) api.add_namespace(subject) api.add_namespace(description) api.add_namespace(identifier) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py new file mode 100644 index 00000000..0af60fdb --- /dev/null +++ b/apis/dataset_metadata/dataset_access.py @@ -0,0 +1,49 @@ +from model import Dataset, DatasetAccess, db + +from flask_restx import Namespace, Resource, fields +from flask import jsonify, request +# from ..dataset_metadata_namespace import api + +api = Namespace("access", description="dataset operations", path="/") + +dataset_access = api.model( + "DatasetAccess", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "description": fields.String(required=True), + "url": fields.String(required=True), + "url_last_checked": fields.String(required=True), + + }, +) + + +@api.route("/study//dataset//metadata/access") +class DatasetAccessResource(Resource): + @api.doc("dataset") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_access) + def get(self, study_id: int, dataset_id: int): + dataset_ = Dataset.query.get(dataset_id) + dataset_access_ = dataset_.dataset_access + return [d.to_dict() for d in dataset_access_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = 
Dataset.query.get(dataset_id) + dataset_request_keys_ = DatasetAccess.from_data(data_obj, data) + db.session.add(dataset_request_keys_) + db.session.commit() + return dataset_request_keys_.to_dict() + + @api.route("/study//dataset//metadata/access/") + class DatasetAccessUpdate(Resource): + def put(self, study_id: int, dataset_id: int, access_id: int): + data = request.json + dataset_access_ = DatasetAccess.query.get(access_id) + dataset_access_.update(request.json) + db.session.commit() + return dataset_access_.to_dict() diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 6ab5266b..a91da035 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -39,4 +39,12 @@ def post(self, study_id: int, dataset_id: int): dataset_consent_ = DatasetConsent.from_data(data_obj, data) db.session.add(dataset_consent_) db.session.commit() - return jsonify(dataset_consent_.to_dict()) \ No newline at end of file + return dataset_consent_.to_dict() + + @api.route("/study//dataset//metadata/consent/") + class DatasetAccessUpdate(Resource): + def put(self, study_id: int, dataset_id: int, consent_id: int): + dataset_consent_ = DatasetConsent.query.get(consent_id) + dataset_consent_.update(request.json) + db.session.commit() + return dataset_consent_.to_dict() diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 781d8a70..fee86f25 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,6 +1,7 @@ -from model import Dataset +from model import Dataset, db, DatasetDate from flask_restx import Namespace, Resource, fields +from flask import jsonify, request api = Namespace("date", description="dataset operations", path="/") @@ -28,3 +29,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_date_ = dataset_.dataset_date return [d.to_dict() for d in dataset_date_] + 
+ def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_date_ = DatasetDate.from_data(data_obj, data) + db.session.add(dataset_date_) + db.session.commit() + return dataset_date_.to_dict() + + @api.route("/study//dataset//metadata/date/") + class DatasetDateUpdate(Resource): + def put(self, study_id: int, dataset_id: int, date_id: int): + dataset_date_ = DatasetDate.query.get(date_id) + dataset_date_.update(request.json) + db.session.commit() + return dataset_date_.to_dict() diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index aae6fc6e..f1cbaa53 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -1,7 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetDeIdentLevel, db from flask_restx import Namespace, Resource, fields - +from flask import request api = Namespace("date", description="dataset operations", path="/") @@ -15,7 +15,7 @@ "dates": fields.Boolean(required=True), "nonarr": fields.Boolean(required=True), "k_anon": fields.Boolean(required=True), - "details-": fields.String(required=True), + "details": fields.String(required=True), }, ) @@ -31,3 +31,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) de_ident_level_ = dataset_.dataset_de_ident_level return [d.to_dict() for d in de_ident_level_] + + def post(self, study_id: int, dataset_id: int): + data=request.json + data_obj = Dataset.query.get(dataset_id) + de_ident_level_ = DatasetDeIdentLevel.from_data(data_obj, data) + db.session.add(de_ident_level_) + db.session.commit() + return de_ident_level_.to_dict() + + @api.route("/study//dataset//metadata/de_ident_level/") + class DatasetDatasetDeIdentLevelUpdate(Resource): + def put(self, study_id: int, dataset_id: int, de_ident_level_id: int): + de_ident_level_ = 
DatasetDeIdentLevel.query.get(de_ident_level_id) + de_ident_level_.update(request.json) + db.session.commit() + return de_ident_level_.to_dict() diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index f72cb0fe..c70534f4 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -1,6 +1,7 @@ -from model import Dataset +from model import Dataset, db, DatasetDescription from flask_restx import Namespace, Resource, fields +from flask import request api = Namespace("description", description="dataset operations", path="/") @@ -27,3 +28,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_description_ = dataset_.dataset_description return [d.to_dict() for d in dataset_description_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_description_ = DatasetDescription.from_data(data_obj, data) + db.session.add(dataset_description_) + db.session.commit() + return dataset_description_.to_dict() + + @api.route("/study//dataset//metadata/description/") + class DatasetDescriptionUpdate(Resource): + def put(self, study_id: int, dataset_id: int, description_id: int): + dataset_description_ = DatasetDescription.query.get(description_id) + dataset_description_.update(request.json) + db.session.commit() + return dataset_description_.to_dict() diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index b50eca65..0fd5c3a4 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,7 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetFunder, db from flask_restx import Namespace, Resource, fields - +from flask import request api = Namespace("description", description="dataset operations", path="/") @@ -32,3 +32,20 @@ def get(self, study_id: int, 
dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_funder_ = dataset_.dataset_funder return [d.to_dict() for d in dataset_funder_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_funder_ = DatasetFunder.from_data(data_obj, data) + db.session.add(dataset_funder_) + db.session.commit() + return dataset_funder_.to_dict() + + +@api.route("/study//dataset//metadata/funder/") +class DatasetFunderUpdate(Resource): + def put(self, study_id: int, dataset_id: int, funder_id: int): + dataset_funder_ = DatasetFunder.query.get(funder_id) + dataset_funder_.update(request.json) + db.session.commit() + return dataset_funder_.to_dict() diff --git a/apis/dataset_metadata/dataset_identifier.py b/apis/dataset_metadata/dataset_identifier.py index bef25d78..ef589b0b 100644 --- a/apis/dataset_metadata/dataset_identifier.py +++ b/apis/dataset_metadata/dataset_identifier.py @@ -1,28 +1,45 @@ -from model import Dataset - +from model import Dataset, db, DatasetIdentifier from flask_restx import Namespace, Resource, fields +from flask import request +api = Namespace("identifier", description="dataset operations", path="/") -api = Namespace("description", description="dataset operations", path="/") - -dataset_description = api.model( - "DatasetDescription", +dataset_identifier = api.model( + "DatasetIdentifier", { "id": fields.String(required=True), - "description": fields.String(required=True), - "description_type": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "alternate": fields.Boolean(required=True), + }, ) -@api.route("/study//dataset//metadata/description") -class DatasetDescriptionResource(Resource): +@api.route("/study//dataset//metadata/identifier") +class DatasetIdentifierResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The 
dataset identifier") - @api.marshal_with(dataset_description) + @api.marshal_with(dataset_identifier) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_description_ = dataset_.dataset_description - return [d.to_dict() for d in dataset_description_] + dataset_identifier_ = dataset_.dataset_identifier + return [d.to_dict() for d in dataset_identifier_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_identifier_ = DatasetIdentifier.from_data(data_obj, data) + db.session.add(dataset_identifier_) + db.session.commit() + return dataset_identifier_.to_dict() + + @api.route("/study//dataset//metadata/identifier/") + class DatasetIdentifierUpdate(Resource): + def put(self, study_id: int, dataset_id: int, identifier_id: int): + dataset_identifier_ = DatasetIdentifier.query.get(identifier_id) + dataset_identifier_.update(request.json) + db.session.commit() + return dataset_identifier_.to_dict() diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 8f5991db..3b035e4a 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -1,4 +1,5 @@ -from model import Dataset +from model import Dataset, db, DatasetManagingOrganization +from flask import request from flask_restx import Namespace, Resource, fields @@ -27,3 +28,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) managing_organization_ = dataset_.dataset_managing_organization return [d.to_dict() for d in managing_organization_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + managing_organization_ = DatasetManagingOrganization.from_data(data_obj, data) + db.session.add(managing_organization_) + db.session.commit() + return 
managing_organization_.to_dict() + + @api.route("/study//dataset//metadata/managing_organization/") + class DatasetManagingOrganizationUpdate(Resource): + def put(self, study_id: int, dataset_id: int, managing_organization_id: int): + managing_organization_ = DatasetManagingOrganization.query.get(managing_organization_id) + managing_organization_.update(request.json) + db.session.commit() + return managing_organization_.to_dict() diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index d17a0eb9..f9b41222 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,8 +1,8 @@ -from model import Dataset +from model import Dataset, db, DatasetOther +from flask import request from flask_restx import Namespace, Resource, fields - api = Namespace("dataset_other", description="dataset operations", path="/") dataset_other = api.model( @@ -30,3 +30,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other return [d.to_dict() for d in dataset_other_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_other_ = DatasetOther.from_data(data_obj, data) + db.session.add(dataset_other_) + db.session.commit() + return dataset_other_.to_dict() + + @api.route("/study//dataset//metadata/other/") + class DatasetOtherUpdate(Resource): + def put(self, study_id: int, dataset_id: int, other_id: int): + dataset_other_ = DatasetOther.query.get(other_id) + dataset_other_.update(request.json) + db.session.commit() + return dataset_other_.to_dict() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index d56054c2..cf9a74ca 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -1,22 +1,22 @@ -from model import Dataset +from model import Dataset, db, DatasetReadme +from flask 
import request from flask_restx import Namespace, Resource, fields - api = Namespace("readme", description="dataset operations", path="/") dataset_readme = api.model( "DatasetReadme", { "id": fields.String(required=True), - "content": fields.Boolean(required=True) + "content": fields.String(required=True) }, ) @api.route("/study//dataset//metadata/readme") -class DatasetDateResource(Resource): +class DatasetReadmeResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -26,3 +26,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_readme_ = dataset_.dataset_readme return [d.to_dict() for d in dataset_readme_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_readme_ = DatasetReadme.from_data(data_obj, data) + db.session.add(dataset_readme_) + db.session.commit() + return dataset_readme_.to_dict() + + @api.route("/study//dataset//metadata/readme/") + class DatasetReadmeUpdate(Resource): + def put(self, study_id: int, dataset_id: int, readme_id: int): + dataset_readme_ = DatasetReadme.query.get(readme_id) + dataset_readme_.update(request.json) + db.session.commit() + return dataset_readme_.to_dict() diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 9e8e26df..180908a1 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -28,19 +28,20 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_record_keys_ = dataset_.dataset_record_keys return [d.to_dict() for d in dataset_record_keys_] + def post(self, study_id: int, dataset_id: int): data = request.json data_obj = Dataset.query.get(dataset_id) - dataset_request_keys_ = DatasetRecordKeys.from_data(data_obj, data) - db.session.add(dataset_request_keys_) + dataset_record_keys_ = 
DatasetRecordKeys.from_data(data_obj, data) + db.session.add(dataset_record_keys_) db.session.commit() - return dataset_request_keys_.to_dict() + return dataset_record_keys_.to_dict() @api.route("/study//dataset//metadata/record_keys/") class DatasetRecordKeysUpdate(Resource): def put(self, study_id: int, dataset_id: int, record_key_id: int): data = request.json - dataset_request_keys_ = DatasetRecordKeys.query.get(record_key_id) - dataset_request_keys_.update(request.json) + dataset_record_keys_ = DatasetRecordKeys.query.get(record_key_id) + dataset_record_keys_.update(request.json) db.session.commit() - return dataset_request_keys_.to_dict() + return dataset_record_keys_.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index edd7bd91..52b9a6e6 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,6 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetRelatedItem, db from flask_restx import Namespace, Resource, fields +from flask import jsonify, request api = Namespace("related_item", description="dataset operations", path="/") @@ -27,3 +28,20 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_related_item_ = dataset_.dataset_related_item return [d.to_dict() for d in dataset_related_item_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_related_item_ = DatasetRelatedItem.from_data(data_obj, data) + db.session.add(dataset_related_item_) + db.session.commit() + return dataset_related_item_.to_dict() + + @api.route("/study//dataset//metadata/related_item/") + class DatasetRelatedItemUpdate(Resource): + def put(self, study_id: int, dataset_id: int, related_item_id: int): + data = request.json + dataset_related_item_ = DatasetRelatedItem.query.get(related_item_id) + 
dataset_related_item_.update(request.json) + db.session.commit() + return dataset_related_item_.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py index 1ba7d2e3..b3f1699c 100644 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -1,7 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetRelatedItemContributor, db from flask_restx import Namespace, Resource, fields - +from flask import jsonify, request api = Namespace("related_item_contributor", description="dataset operations", path="/") diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py index a4801c35..004f6be9 100644 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -1,6 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetRelatedItemIdentifier, db from flask_restx import Namespace, Resource, fields +from flask import jsonify, request api = Namespace("related_item", description="dataset operations", path="/") diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py index c795dce5..d800cf45 100644 --- a/apis/dataset_metadata/dataset_related_item_other.py +++ b/apis/dataset_metadata/dataset_related_item_other.py @@ -1,7 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetRelatedItemOther, db from flask_restx import Namespace, Resource, fields - +from flask import jsonify, request api = Namespace("related_item_other", description="dataset operations", path="/") diff --git a/apis/dataset_metadata/dataset_related_item_title.py b/apis/dataset_metadata/dataset_related_item_title.py index 3ac570f8..427e1b7f 100644 --- a/apis/dataset_metadata/dataset_related_item_title.py +++ 
b/apis/dataset_metadata/dataset_related_item_title.py @@ -1,6 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetRelatedItemTitle, db from flask_restx import Namespace, Resource, fields +from flask import jsonify, request api = Namespace("related_item_title", description="dataset operations", path="/") diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index be868d59..ad604aeb 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -1,7 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetRights, db from flask_restx import Namespace, Resource, fields - +from flask import jsonify, request api = Namespace("dataset_rights", description="dataset operations", path="/") @@ -29,3 +29,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_rights_ = dataset_.dataset_rights return [d.to_dict() for d in dataset_rights_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_rights_ = DatasetRights.from_data(data_obj, data) + db.session.add(dataset_rights_) + db.session.commit() + return dataset_rights_.to_dict() + + @api.route("/study//dataset//metadata/rights/") + class DatasetRightsUpdate(Resource): + def put(self, study_id: int, dataset_id: int, rights_id: int): + dataset_rights_ = DatasetRights.query.get(rights_id) + dataset_rights_.update(request.json) + db.session.commit() + return dataset_rights_.to_dict() diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index aa8cad52..131bde5e 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -1,7 +1,7 @@ -from model import Dataset +from model import Dataset, DatasetSubject, db from flask_restx import Namespace, Resource, fields - +from flask import jsonify, request api = 
Namespace("description", description="dataset operations", path="/") @@ -29,3 +29,19 @@ def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_subject_ = dataset_.dataset_subject return [d.to_dict() for d in dataset_subject_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_subject_ = DatasetSubject.from_data(data_obj, data) + db.session.add(dataset_subject_) + db.session.commit() + return dataset_subject_.to_dict() + + @api.route("/study//dataset//metadata/subject/") + class DatasetSubjectUpdate(Resource): + def put(self, study_id: int, dataset_id: int, subject_id: int): + dataset_subject_ = DatasetSubject.query.get(subject_id) + dataset_subject_.update(request.json) + db.session.commit() + return dataset_subject_.to_dict() diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 62f588db..4257c5de 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,34 +1,46 @@ -from model import Dataset +from model import Dataset, DatasetTitle, db from flask_restx import Namespace, Resource, fields +from flask import jsonify, request api = Namespace("title", description="dataset operations", path="/") dataset_title = api.model( - "StudyContact", + "DatasetTitle", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), + "title": fields.String(required=True), + "type": fields.String(required=True), + }, ) -@api.route("/study//metadata/contact") -class StudyContactResource(Resource): +@api.route("/study//dataset//metadata/title") 
+class DatasetTitleResource(Resource): @api.doc("dataset") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_title) - def get(self, dataset_id: int): + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_title_ = dataset_.dataset_title return [d.to_dict() for d in dataset_title_] + + def post(self, study_id: int, dataset_id: int): + data = request.json + data_obj = Dataset.query.get(dataset_id) + dataset_title_ = DatasetTitle.from_data(data_obj, data) + db.session.add(dataset_title_) + db.session.commit() + return dataset_title_.to_dict() + + @api.route("/study//dataset//metadata/title/") + class DatasetTitleUpdate(Resource): + def put(self, study_id: int, dataset_id: int, title_id: int): + dataset_title_ = DatasetTitle.query.get(title_id) + dataset_title_.update(request.json) + db.session.commit() + return dataset_title_.to_dict() diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 1ecf9139..dced314d 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -3,9 +3,9 @@ class DatasetAccess(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_access" id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) @@ -19,18 +19,20 @@ def __init__(self): def to_dict(self): return { "id": self.id, - "type": self.destypecription, + "type": self.type, "description": self.description, "url": self.url, "url_last_checked": self.url_last_checked, } @staticmethod - def from_data(data: dict): - dataset_access = DatasetAccess() - - dataset_access.description = data["description"] - dataset_access.url = data["url"] - dataset_access.url_last_checked = data["url_last_checked"] - dataset_access.type = data["type"] + def from_data(dataset, 
data: dict): + dataset_access = DatasetAccess(dataset) + dataset_access.update(data) return dataset_access + + def update(self, data): + self.description = data["description"] + self.url = data["url"] + self.url_last_checked = data["url_last_checked"] + self.type = data["type"] diff --git a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index 0cd6d703..83812dac 100644 --- a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -3,9 +3,9 @@ class DatasetContributorAffiliation(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_contributor_affiliation" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) @@ -27,9 +27,14 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_contributor = DatasetContributorAffiliation() - dataset_contributor.name_identifier = data["identifier"] - dataset_contributor.name_identifier_scheme = data["identifier_scheme"] - dataset_contributor.name_identifier_scheme_uri = data["identifier_scheme_uri"] + def from_data(dataset, data: dict): + dataset_contributor = DatasetContributorAffiliation(dataset) + dataset_contributor.update(data) return dataset_contributor + + + def update(self, data): + self.identifier = data["identifier"] + self.identifier_scheme = data["identifier_scheme"] + self.identifier_scheme_uri = data["identifier_scheme_uri"] + diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index bfba862f..5b150716 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -3,8 +3,9 @@ class DatasetDate(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) + self.dataset = dataset __tablename__ = "dataset_date" id = 
db.Column(db.CHAR(36), primary_key=True) @@ -24,9 +25,13 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_date = DatasetDate() - dataset_date.date = data["date"] - dataset_date.date_type = data["date_type"] - dataset_date.data_information = data["data_information"] + def from_data(dataset, data: dict): + dataset_date = DatasetDate(dataset) + dataset_date.update(data) return dataset_date + + def update(self, data): + self.date = data["date"] + self.date_type = data["date_type"] + self.data_information = data["data_information"] + diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index 53697d5e..b82d621e 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -3,8 +3,9 @@ class DatasetDeIdentLevel(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) + self.dataset = dataset __tablename__ = "dataset_de_ident_level" id = db.Column(db.CHAR(36), primary_key=True) @@ -33,14 +34,15 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_de_ident_level = DatasetDeIdentLevel() - dataset_de_ident_level.type = data["type"] - dataset_de_ident_level.direct = data["direct"] - dataset_de_ident_level.hipaa = data["hipaa"] - dataset_de_ident_level.dates = data["dates"] - dataset_de_ident_level.nonarr = data["nonarr"] - dataset_de_ident_level.k_anon = data["k_anon"] - dataset_de_ident_level.details = data["details"] - + def from_data(dataset, data: dict): + dataset_de_ident_level = DatasetDeIdentLevel(dataset) + dataset_de_ident_level.update(data) return dataset_de_ident_level + def update(self, data): + self.type = data["type"] + self.direct = data["direct"] + self.hipaa = data["hipaa"] + self.dates = data["dates"] + self.nonarr = data["nonarr"] + self.k_anon = data["k_anon"] + self.details = data["details"] diff --git a/model/dataset_metadata/dataset_description.py 
b/model/dataset_metadata/dataset_description.py index 5c9ea55d..8a24f929 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -3,8 +3,9 @@ class DatasetDescription(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) + self.dataset = dataset __tablename__ = "dataset_description" id = db.Column(db.CHAR(36), primary_key=True) @@ -22,9 +23,13 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_description = DatasetDescription() - dataset_description.description = data["description"] - dataset_description.description_type = data["description_type"] - dataset_description.name_identifier_scheme_uri = data["identifier_scheme_uri"] + def from_data(dataset,data: dict): + dataset_description = DatasetDescription(dataset) + dataset_description.update(data) return dataset_description + + + def update(self, data): + self.description = data["description"] + self.description_type = data["description_type"] + diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index 0e07c22d..47b5287a 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -3,8 +3,9 @@ class DatasetFunder(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) + self.dataset = dataset __tablename__ = "dataset_funder" id = db.Column(db.CHAR(36), primary_key=True) @@ -32,14 +33,17 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_funder = DatasetFunder() - dataset_funder.name = data["name"] - dataset_funder.identifier = data["identifier"] - dataset_funder.identifier_type = data["identifier_type"] - dataset_funder.identifier_scheme_uri = data["identifier_scheme_uri"] - dataset_funder.award_number = data["award_number"] - dataset_funder.award_uri = data["award_uri"] - dataset_funder.award_title = data["award_title"] - + def from_data(dataset, 
data: dict): + dataset_funder = DatasetFunder(dataset) + dataset_funder.update(data) return dataset_funder + + def update(self, data): + self.name = data["name"] + self.identifier = data["identifier"] + self.identifier_type = data["identifier_type"] + self.identifier_scheme_uri = data["identifier_scheme_uri"] + self.award_number = data["award_number"] + self.award_uri = data["award_uri"] + self.award_title = data["award_title"] + diff --git a/model/dataset_metadata/dataset_identifier.py b/model/dataset_metadata/dataset_identifier.py index 823ac09c..e8225aab 100644 --- a/model/dataset_metadata/dataset_identifier.py +++ b/model/dataset_metadata/dataset_identifier.py @@ -3,9 +3,9 @@ class DatasetIdentifier(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_identifier" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) @@ -24,10 +24,13 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_date = DatasetIdentifier() - - dataset_date.identifier = data["identifier"] - dataset_date.identifier_type = data["identifier_type"] - dataset_date.alternate = data["alternate"] + def from_data(dataset, data: dict): + dataset_date = DatasetIdentifier(dataset) + dataset_date.update(data) return dataset_date + + def update(self, data): + self.identifier = data["identifier"] + self.identifier_type = data["identifier_type"] + self.alternate = data["alternate"] + diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py index 34699daa..c31c3d93 100644 --- a/model/dataset_metadata/dataset_managing_organization.py +++ b/model/dataset_metadata/dataset_managing_organization.py @@ -3,9 +3,9 @@ class DatasetManagingOrganization(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = 
"dataset_managing_organization" id = db.Column(db.CHAR(36), primary_key=True) @@ -23,8 +23,11 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_managing_organization = DatasetManagingOrganization() - dataset_managing_organization.name = data["name"] - dataset_managing_organization.ror_id = data["ror_id"] + def from_data(dataset, data: dict): + dataset_managing_organization = DatasetManagingOrganization(dataset) + dataset_managing_organization.update(data) return dataset_managing_organization + + def update(self, data): + self.name = data["name"] + self.ror_id = data["ror_id"] diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 17b9a281..2355035a 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -5,9 +5,9 @@ class DatasetOther(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_other" id = db.Column(db.CHAR(36), primary_key=True) @@ -28,17 +28,22 @@ def to_dict(self): "managing_organization_name": self.managing_organization_name, "managing_organization_ror_id": self.managing_organization_ror_id, "standards_followed": self.managing_organization_ror_id, - "acknowledgement": self.size, + "acknowledgement": self.acknowledgement, "size": self.size, } @staticmethod - def from_data(data: dict): - dataset_other = DatasetOther() - dataset_other.language = data["language"] - dataset_other.managing_organization_name = data["managing_organization_name"] - dataset_other.managing_organization_ror_id = data[ + def from_data(dataset, data: dict): + dataset_other = DatasetOther(dataset) + dataset_other.update(data) + return dataset_other + + def update(self, data): + self.language = data["language"] + self.managing_organization_name = data["managing_organization_name"] + self.managing_organization_ror_id = data[ "managing_organization_ror_id" ] - dataset_other.size = 
data["size"] - return dataset_other + self.size = data["size"] + self.acknowledgement = data["acknowledgement"] + self.standards_followed = data["standards_followed"] diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index 4c04e9f3..edd8bbd8 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -3,12 +3,12 @@ class DatasetReadme(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_readme" id = db.Column(db.CHAR(36), primary_key=True) - content = db.Column(db.BOOLEAN, nullable=False) + content = db.Column(db.String, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship("Dataset", back_populates="dataset_readme") @@ -20,7 +20,12 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_readme = DatasetReadme() - dataset_readme.content = data["content"] + def from_data(dataset, data: dict): + dataset_readme = DatasetReadme(dataset) + dataset_readme.update(data) return dataset_readme + + def update(self, data): + self.content = data["content"] + + diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index ed60469d..e37e57d2 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -3,9 +3,9 @@ class DatasetRelatedItem(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_related_item" id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) @@ -34,8 +34,12 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_related_item = DatasetRelatedItem() - dataset_related_item.type = data["type"] - dataset_related_item.relation_type = data["relation_type"] + def 
from_data(dataset, data: dict): + dataset_related_item = DatasetRelatedItem(dataset) + dataset_related_item.update(data) return dataset_related_item + + def update(self, data): + self.type = data["type"] + self.relation_type = data["relation_type"] + diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index c22ed9e2..8d804d5d 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -3,9 +3,9 @@ class DatasetRelatedItemContributor(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_related_item_contributor" id = db.Column(db.CHAR(36), primary_key=True) name = db.Column(db.String, nullable=False) @@ -30,10 +30,14 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_related_contributor = DatasetRelatedItemContributor() - dataset_related_contributor.name = data["name"] - dataset_related_contributor.name_type = data["name_type"] - dataset_related_contributor.creator = data["creator"] - dataset_related_contributor.contributor_type = data["contributor_type"] + def from_data(dataset, data: dict): + dataset_related_contributor = DatasetRelatedItemContributor(dataset) + dataset_related_contributor.update(data) return dataset_related_contributor + + def update(self, data): + self.name = data["name"] + self.name_type = data["name_type"] + self.creator = data["creator"] + self.contributor_type = data["contributor_type"] + diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 7cc33d96..a3ad62a0 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -40,3 +40,5 @@ def from_data(data: dict): dataset_related_item_identifier.scheme_uri = 
data["scheme_uri"] dataset_related_item_identifier.scheme_type = data["scheme_type"] return dataset_related_item_identifier + + diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py index b9ac094e..3ffb3709 100644 --- a/model/dataset_metadata/dataset_related_item_other.py +++ b/model/dataset_metadata/dataset_related_item_other.py @@ -3,9 +3,9 @@ class DatasetRelatedItemOther(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_related_item_other" id = db.Column(db.CHAR(36), primary_key=True) publication_year = db.Column(db.String, nullable=False) @@ -41,15 +41,18 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_related_item_other = DatasetRelatedItemOther() - dataset_related_item_other.publication_year = data["publication_year"] - dataset_related_item_other.volume = data["volume"] - dataset_related_item_other.issue = data["issue"] - dataset_related_item_other.number_value = data["number_value"] - dataset_related_item_other.number_type = data["number_type"] - dataset_related_item_other.first_page = data["first_page"] - dataset_related_item_other.last_page = data["last_page"] - dataset_related_item_other.publisher = data["publisher"] - dataset_related_item_other.edition = data["edition"] + def from_data(dataset, data: dict): + dataset_related_item_other = DatasetRelatedItemOther(dataset) + dataset_related_item_other.update(data) return dataset_related_item_other + + def update(self, data): + self.publication_year = data["publication_year"] + self.volume = data["volume"] + self.issue = data["issue"] + self.number_value = data["number_value"] + self.number_type = data["number_type"] + self.first_page = data["first_page"] + self.last_page = data["last_page"] + self.publisher = data["publisher"] + self.edition = data["edition"] diff --git 
a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 32aa37b1..14ec2bb4 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -3,9 +3,9 @@ class DatasetRelatedItemTitle(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_related_item_title" id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) @@ -26,8 +26,11 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(dataset, data: dict): dataset_related_item_title = DatasetRelatedItemTitle() - dataset_related_item_title.type = data["type"] - dataset_related_item_title.title = data["title"] + dataset_related_item_title.update(data) return dataset_related_item_title + + def update(self, data): + self.type = data["type"] + self.title = data["title"] diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 38963cb3..3cba1fdb 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -3,9 +3,9 @@ class DatasetRights(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_rights" id = db.Column(db.CHAR(36), primary_key=True) @@ -27,10 +27,13 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_rights = DatasetRights() - dataset_rights.rights = data["rights"] - dataset_rights.uri = data["uri"] - dataset_rights.identifier = data["identifier"] - dataset_rights.identifier_scheme = data["identifier_scheme"] + def from_data(dataset, data: dict): + dataset_rights = DatasetRights(dataset) + dataset_rights.update(data) return dataset_rights + + def update(self, data): + self.rights = data["rights"] + self.uri = data["uri"] + self.identifier = 
data["identifier"] + self.identifier_scheme = data["identifier_scheme"] diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 7714288b..0c8d18a4 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -3,9 +3,9 @@ class DatasetSubject(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_subject" id = db.Column(db.CHAR(36), primary_key=True) @@ -29,11 +29,14 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - dataset_subject = DatasetRights() - dataset_subject.subject = data["subject"] - dataset_subject.scheme = data["scheme"] - dataset_subject.scheme_uri = data["scheme_uri"] - dataset_subject.value_uri = data["value_uri"] - dataset_subject.classification_code = data["classification_code"] + def from_data(dataset, data: dict): + dataset_subject = DatasetSubject(dataset) + dataset_subject.update(data) return dataset_subject + + def update(self, data): + self.subject = data["subject"] + self.scheme = data["scheme"] + self.scheme_uri = data["scheme_uri"] + self.value_uri = data["value_uri"] + self.classification_code = data["classification_code"] diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index c0f8c61a..16afc373 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -3,13 +3,14 @@ class DatasetTitle(db.Model): - def __init__(self): + def __init__(self, dataset): self.id = str(uuid.uuid4()) - + self.dataset = dataset __tablename__ = "dataset_title" id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) type = db.Column(db.String, nullable=False) + dataset = db.relationship("Dataset", back_populates="dataset_title") dataset_id = db.Column(db.String, db.ForeignKey("dataset.id")) @@ -21,9 +22,13 @@ def to_dict(self): } @staticmethod - 
def from_data(data: dict): - dataset_title = DatasetTitle() + def from_data(dataset, data: dict): + dataset_title = DatasetTitle(dataset) + dataset_title.update(data) - dataset_title.title = data["title"] - dataset_title.type = data["type"] return dataset_title + + def update(self, data): + self.title = data["title"] + self.type = data["type"] + From d5d605a4405c64b27b5be6e4c9e3d79fa6f69db9 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 24 Aug 2023 22:24:09 +0000 Subject: [PATCH 039/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 3 ++- apis/dataset_metadata/dataset_access.py | 2 +- apis/dataset_metadata/dataset_de_ident_level.py | 6 ++++-- apis/dataset_metadata/dataset_description.py | 4 +++- apis/dataset_metadata/dataset_identifier.py | 5 +++-- apis/dataset_metadata/dataset_managing_organization.py | 8 ++++++-- apis/dataset_metadata/dataset_readme.py | 6 +----- apis/dataset_metadata/dataset_related_item.py | 4 +++- apis/dataset_metadata/dataset_title.py | 3 +-- model/dataset_metadata/dataset_access.py | 1 + model/dataset_metadata/dataset_contributor_affiliation.py | 3 +-- model/dataset_metadata/dataset_date.py | 1 - model/dataset_metadata/dataset_de_ident_level.py | 1 + model/dataset_metadata/dataset_description.py | 4 +--- model/dataset_metadata/dataset_funder.py | 1 - model/dataset_metadata/dataset_identifier.py | 2 +- model/dataset_metadata/dataset_managing_organization.py | 1 + model/dataset_metadata/dataset_other.py | 5 ++--- model/dataset_metadata/dataset_readme.py | 3 +-- model/dataset_metadata/dataset_related_item.py | 2 +- .../dataset_metadata/dataset_related_item_contributor.py | 2 +- model/dataset_metadata/dataset_related_item_identifier.py | 2 -- model/dataset_metadata/dataset_related_item_other.py | 1 + model/dataset_metadata/dataset_related_item_title.py | 1 + 
model/dataset_metadata/dataset_rights.py | 1 + model/dataset_metadata/dataset_subject.py | 1 + model/dataset_metadata/dataset_title.py | 2 +- 27 files changed, 40 insertions(+), 35 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 623cdf30..fa632c24 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -67,7 +67,8 @@ def get(self): return "Server active!" -#api.add_namespace(dataset_metadata_namespace) + +# api.add_namespace(dataset_metadata_namespace) api.add_namespace(cats_api) api.add_namespace(study_api) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index 0af60fdb..19c593b4 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -2,6 +2,7 @@ from flask_restx import Namespace, Resource, fields from flask import jsonify, request + # from ..dataset_metadata_namespace import api api = Namespace("access", description="dataset operations", path="/") @@ -14,7 +15,6 @@ "description": fields.String(required=True), "url": fields.String(required=True), "url_last_checked": fields.String(required=True), - }, ) diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index f1cbaa53..eede59fe 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -33,14 +33,16 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in de_ident_level_] def post(self, study_id: int, dataset_id: int): - data=request.json + data = request.json data_obj = Dataset.query.get(dataset_id) de_ident_level_ = DatasetDeIdentLevel.from_data(data_obj, data) db.session.add(de_ident_level_) db.session.commit() return de_ident_level_.to_dict() - @api.route("/study//dataset//metadata/de_ident_level/") + @api.route( + "/study//dataset//metadata/de_ident_level/" + ) class DatasetDatasetDeIdentLevelUpdate(Resource): def put(self, study_id: int, dataset_id: int, 
de_ident_level_id: int): de_ident_level_ = DatasetDeIdentLevel.query.get(de_ident_level_id) diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 1842470e..68cd2db5 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -36,7 +36,9 @@ def post(self, study_id: int, dataset_id: int): db.session.commit() return dataset_description_.to_dict() - @api.route("/study//dataset//metadata/description/") + @api.route( + "/study//dataset//metadata/description/" + ) class DatasetDescriptionUpdate(Resource): def put(self, study_id: int, dataset_id: int, description_id: int): dataset_description_ = DatasetDescription.query.get(description_id) diff --git a/apis/dataset_metadata/dataset_identifier.py b/apis/dataset_metadata/dataset_identifier.py index ef589b0b..bb3beda7 100644 --- a/apis/dataset_metadata/dataset_identifier.py +++ b/apis/dataset_metadata/dataset_identifier.py @@ -11,7 +11,6 @@ "identifier": fields.String(required=True), "identifier_type": fields.String(required=True), "alternate": fields.Boolean(required=True), - }, ) @@ -36,7 +35,9 @@ def post(self, study_id: int, dataset_id: int): db.session.commit() return dataset_identifier_.to_dict() - @api.route("/study//dataset//metadata/identifier/") + @api.route( + "/study//dataset//metadata/identifier/" + ) class DatasetIdentifierUpdate(Resource): def put(self, study_id: int, dataset_id: int, identifier_id: int): dataset_identifier_ = DatasetIdentifier.query.get(identifier_id) diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index b65c74e3..d93c8f84 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -36,10 +36,14 @@ def post(self, study_id: int, dataset_id: int): db.session.commit() return managing_organization_.to_dict() - 
@api.route("/study//dataset//metadata/managing_organization/") + @api.route( + "/study//dataset//metadata/managing_organization/" + ) class DatasetManagingOrganizationUpdate(Resource): def put(self, study_id: int, dataset_id: int, managing_organization_id: int): - managing_organization_ = DatasetManagingOrganization.query.get(managing_organization_id) + managing_organization_ = DatasetManagingOrganization.query.get( + managing_organization_id + ) managing_organization_.update(request.json) db.session.commit() return managing_organization_.to_dict() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index cf9a74ca..0c19f112 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -7,11 +7,7 @@ dataset_readme = api.model( "DatasetReadme", - { - "id": fields.String(required=True), - "content": fields.String(required=True) - - }, + {"id": fields.String(required=True), "content": fields.String(required=True)}, ) diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 9712d344..7deb3264 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -36,7 +36,9 @@ def post(self, study_id: int, dataset_id: int): db.session.commit() return dataset_related_item_.to_dict() - @api.route("/study//dataset//metadata/related_item/") + @api.route( + "/study//dataset//metadata/related_item/" + ) class DatasetRelatedItemUpdate(Resource): def put(self, study_id: int, dataset_id: int, related_item_id: int): data = request.json diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 4257c5de..3412adb1 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -12,7 +12,6 @@ "id": fields.String(required=True), "title": fields.String(required=True), "type": fields.String(required=True), - }, ) @@ -24,7 +23,7 @@ 
class DatasetTitleResource(Resource): @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_title) - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) dataset_title_ = dataset_.dataset_title return [d.to_dict() for d in dataset_title_] diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index dced314d..6507cf8a 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -6,6 +6,7 @@ class DatasetAccess(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_access" id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) diff --git a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index 83812dac..eb9dff57 100644 --- a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -6,6 +6,7 @@ class DatasetContributorAffiliation(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_contributor_affiliation" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) @@ -32,9 +33,7 @@ def from_data(dataset, data: dict): dataset_contributor.update(data) return dataset_contributor - def update(self, data): self.identifier = data["identifier"] self.identifier_scheme = data["identifier_scheme"] self.identifier_scheme_uri = data["identifier_scheme_uri"] - diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index 5b150716..a46127bb 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -34,4 +34,3 @@ def update(self, data): self.date = 
data["date"] self.date_type = data["date_type"] self.data_information = data["data_information"] - diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index b82d621e..53bf32e9 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -38,6 +38,7 @@ def from_data(dataset, data: dict): dataset_de_ident_level = DatasetDeIdentLevel(dataset) dataset_de_ident_level.update(data) return dataset_de_ident_level + def update(self, data): self.type = data["type"] self.direct = data["direct"] diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index 8a24f929..65eb2f96 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -23,13 +23,11 @@ def to_dict(self): } @staticmethod - def from_data(dataset,data: dict): + def from_data(dataset, data: dict): dataset_description = DatasetDescription(dataset) dataset_description.update(data) return dataset_description - def update(self, data): self.description = data["description"] self.description_type = data["description_type"] - diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index 47b5287a..fd0206b0 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -46,4 +46,3 @@ def update(self, data): self.award_number = data["award_number"] self.award_uri = data["award_uri"] self.award_title = data["award_title"] - diff --git a/model/dataset_metadata/dataset_identifier.py b/model/dataset_metadata/dataset_identifier.py index e8225aab..572cba5f 100644 --- a/model/dataset_metadata/dataset_identifier.py +++ b/model/dataset_metadata/dataset_identifier.py @@ -6,6 +6,7 @@ class DatasetIdentifier(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_identifier" 
id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) @@ -33,4 +34,3 @@ def update(self, data): self.identifier = data["identifier"] self.identifier_type = data["identifier_type"] self.alternate = data["alternate"] - diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py index c31c3d93..80eae34c 100644 --- a/model/dataset_metadata/dataset_managing_organization.py +++ b/model/dataset_metadata/dataset_managing_organization.py @@ -6,6 +6,7 @@ class DatasetManagingOrganization(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_managing_organization" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 2355035a..f33d24a5 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -8,6 +8,7 @@ class DatasetOther(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_other" id = db.Column(db.CHAR(36), primary_key=True) @@ -41,9 +42,7 @@ def from_data(dataset, data: dict): def update(self, data): self.language = data["language"] self.managing_organization_name = data["managing_organization_name"] - self.managing_organization_ror_id = data[ - "managing_organization_ror_id" - ] + self.managing_organization_ror_id = data["managing_organization_ror_id"] self.size = data["size"] self.acknowledgement = data["acknowledgement"] self.standards_followed = data["standards_followed"] diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index edd8bbd8..3e815908 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -6,6 +6,7 @@ class DatasetReadme(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) 
self.dataset = dataset + __tablename__ = "dataset_readme" id = db.Column(db.CHAR(36), primary_key=True) content = db.Column(db.String, nullable=False) @@ -27,5 +28,3 @@ def from_data(dataset, data: dict): def update(self, data): self.content = data["content"] - - diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index e37e57d2..a42580af 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -6,6 +6,7 @@ class DatasetRelatedItem(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_related_item" id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) @@ -42,4 +43,3 @@ def from_data(dataset, data: dict): def update(self, data): self.type = data["type"] self.relation_type = data["relation_type"] - diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index 8d804d5d..4a7388ec 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -6,6 +6,7 @@ class DatasetRelatedItemContributor(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_related_item_contributor" id = db.Column(db.CHAR(36), primary_key=True) name = db.Column(db.String, nullable=False) @@ -40,4 +41,3 @@ def update(self, data): self.name_type = data["name_type"] self.creator = data["creator"] self.contributor_type = data["contributor_type"] - diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index a3ad62a0..7cc33d96 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -40,5 +40,3 @@ def from_data(data: dict): 
dataset_related_item_identifier.scheme_uri = data["scheme_uri"] dataset_related_item_identifier.scheme_type = data["scheme_type"] return dataset_related_item_identifier - - diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py index 3ffb3709..0312985e 100644 --- a/model/dataset_metadata/dataset_related_item_other.py +++ b/model/dataset_metadata/dataset_related_item_other.py @@ -6,6 +6,7 @@ class DatasetRelatedItemOther(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_related_item_other" id = db.Column(db.CHAR(36), primary_key=True) publication_year = db.Column(db.String, nullable=False) diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 14ec2bb4..92efeb4e 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -6,6 +6,7 @@ class DatasetRelatedItemTitle(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_related_item_title" id = db.Column(db.CHAR(36), primary_key=True) type = db.Column(db.String, nullable=False) diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 3cba1fdb..da63b24c 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -6,6 +6,7 @@ class DatasetRights(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_rights" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 0c8d18a4..e42c61c4 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -6,6 +6,7 @@ class DatasetSubject(db.Model): def __init__(self, 
dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_subject" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index 16afc373..f6e367e9 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -6,6 +6,7 @@ class DatasetTitle(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + __tablename__ = "dataset_title" id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) @@ -31,4 +32,3 @@ def from_data(dataset, data: dict): def update(self, data): self.title = data["title"] self.type = data["type"] - From 9816d69ec625e4914c21f03f78f199489804c134 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 24 Aug 2023 17:35:18 -0700 Subject: [PATCH 040/505] fix: replaced namespaces with union Dataset metadata name --- apis/__init__.py | 96 +++++++++---------- apis/cats.py | 36 ------- apis/dataset_metadata/dataset_access.py | 7 +- apis/dataset_metadata/dataset_consent.py | 7 +- apis/dataset_metadata/dataset_date.py | 6 +- .../dataset_de_ident_level.py | 4 +- apis/dataset_metadata/dataset_description.py | 4 +- apis/dataset_metadata/dataset_funder.py | 4 +- apis/dataset_metadata/dataset_identifier.py | 4 +- .../dataset_managing_organization.py | 4 +- apis/dataset_metadata/dataset_other.py | 2 +- apis/dataset_metadata/dataset_readme.py | 2 +- apis/dataset_metadata/dataset_record_keys.py | 4 +- apis/dataset_metadata/dataset_related_item.py | 4 +- .../dataset_related_item_contributor.py | 2 +- .../dataset_related_item_identifier.py | 2 +- .../dataset_related_item_other.py | 2 +- .../dataset_related_item_title.py | 3 +- apis/dataset_metadata/dataset_rights.py | 2 +- apis/dataset_metadata/dataset_subject.py | 2 +- apis/dataset_metadata/dataset_title.py | 2 +- apis/dataset_metadata_namespace.py | 4 + 22 files changed, 81 insertions(+), 122 
deletions(-) delete mode 100644 apis/cats.py create mode 100644 apis/dataset_metadata_namespace.py diff --git a/apis/__init__.py b/apis/__init__.py index 623cdf30..fdb8c5fa 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -1,7 +1,9 @@ """Initialize the api system for the backend""" from flask_restx import Api, Resource -from .cats import api as cats_api +from apis.dataset_metadata_namespace import api as dataset_metadata_namespace +from apis.study_metadata_namespace import api as study_metadata_namespace + from .contributor import api as contributors_api from .dataset import api as dataset_api from .participant import api as participants_api @@ -24,7 +26,6 @@ from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator from .study_metadata.study_status import api as status -from .dataset_metadata_namespace import api as dataset_metadata_namespace from .dataset_metadata.dataset_access import api as access from .dataset_metadata.dataset_consent import api as consent @@ -41,12 +42,8 @@ from .dataset_metadata.dataset_title import api as title from .dataset_metadata.dataset_related_item import api as related_item from .dataset_metadata.dataset_related_item_title import api as related_item_title -from .dataset_metadata.dataset_related_item_contributor import ( - api as related_item_contributor, -) -from .dataset_metadata.dataset_related_item_identifier import ( - api as related_item_identifier, -) +from .dataset_metadata.dataset_related_item_contributor import api as related_item_contributor +from .dataset_metadata.dataset_related_item_identifier import api as related_item_identifier from .dataset_metadata.dataset_related_item_other import api as related_item_other from .dataset_metadata.dataset_funder import api as funder @@ -58,6 +55,10 @@ ) +api.add_namespace(dataset_metadata_namespace) +api.add_namespace(study_metadata_namespace) + + @api.route("/echo", endpoint="echo") class HelloWorld(Resource): @api.response(200, "Success") @@ 
-67,48 +68,45 @@ def get(self): return "Server active!" -#api.add_namespace(dataset_metadata_namespace) - -api.add_namespace(cats_api) api.add_namespace(study_api) api.add_namespace(dataset_api) api.add_namespace(participants_api) api.add_namespace(contributors_api) - -api.add_namespace(arm) -api.add_namespace(available_ipd) -api.add_namespace(contact) -api.add_namespace(description) -api.add_namespace(design) -api.add_namespace(eligibility) -api.add_namespace(identification) -api.add_namespace(intervention) -api.add_namespace(ipdsharing) -api.add_namespace(link) -api.add_namespace(location) -api.add_namespace(other) -api.add_namespace(overall_official) -api.add_namespace(reference) -api.add_namespace(sponsors_collaborator) -api.add_namespace(status) - - -api.add_namespace(access) -api.add_namespace(funder) -api.add_namespace(consent) -api.add_namespace(subject) -api.add_namespace(description) -api.add_namespace(identifier) -api.add_namespace(dataset_other) -api.add_namespace(date) -api.add_namespace(de_ident_level) -api.add_namespace(managing_organization) -api.add_namespace(readme) -api.add_namespace(record_keys) -api.add_namespace(rights) -api.add_namespace(title) -api.add_namespace(related_item) -# api.add_namespace(related_item_title) -# api.add_namespace(related_item_contributor) -# api.add_namespace(related_item_identifier) -# api.add_namespace(related_item_other) +# +# api.add_namespace(arm) +# api.add_namespace(available_ipd) +# api.add_namespace(contact) +# api.add_namespace(description) +# api.add_namespace(design) +# api.add_namespace(eligibility) +# api.add_namespace(identification) +# api.add_namespace(intervention) +# api.add_namespace(ipdsharing) +# api.add_namespace(link) +# api.add_namespace(location) +# api.add_namespace(other) +# api.add_namespace(overall_official) +# api.add_namespace(reference) +# api.add_namespace(sponsors_collaborator) +# api.add_namespace(status) +# +# +# api.add_namespace(access) +# api.add_namespace(funder) +# 
api.add_namespace(consent) +# api.add_namespace(subject) +# api.add_namespace(description) +# api.add_namespace(identifier) +# api.add_namespace(dataset_other) +# api.add_namespace(date) +# api.add_namespace(de_ident_level) +# api.add_namespace(managing_organization) +# api.add_namespace(readme) +# api.add_namespace(record_keys) +# api.add_namespace(rights) +# api.add_namespace(title) +# api.add_namespace(related_item) +# # api.add_namespace(related_item_title) +# # api.add_namespace(related_item_contributor) +# # api.add_namespace(related_item_identifier) +# # api.add_namespace(related_item_other) diff --git a/apis/cats.py b/apis/cats.py deleted file mode 100644 index 939606c6..00000000 --- a/apis/cats.py +++ /dev/null @@ -1,36 +0,0 @@ -from flask_restx import Namespace, Resource, fields - -api = Namespace("cats", description="Cats related operations") - -cat = api.model( - "Cat", - { - "id": fields.String(required=True, description="The cat identifier"), - "name": fields.String(required=True, description="The cat name"), - }, -) - -CATS = [ - {"id": "felix", "name": "Felix"}, -] - - -@api.route("/") -class CatList(Resource): - @api.doc("list_cats") - @api.marshal_list_with(cat) - def get(self): - return CATS - - -@api.route("/") -@api.param("id", "The cat identifier") -@api.response(404, "Cat not found") -class Cat(Resource): - @api.doc("get_cat") - @api.marshal_with(cat) - def get(self, id): - for cat in CATS: - if cat["id"] == id: - return cat - api.abort(404) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index 0af60fdb..d83011db 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -1,10 +1,9 @@ from model import Dataset, DatasetAccess, db -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request -# from ..dataset_metadata_namespace import api +from flask_restx import Resource, fields +from flask import request +from 
apis.dataset_metadata_namespace import api -api = Namespace("access", description="dataset operations", path="/") dataset_access = api.model( "DatasetAccess", diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index a91da035..aab860f6 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,11 +1,10 @@ from model import Dataset, DatasetConsent, db -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request +from flask_restx import Resource, fields +from flask import request +from apis.dataset_metadata_namespace import api -api = Namespace("consent", description="dataset operations", path="/") - dataset_consent = api.model( "DatasetConsent", { diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 63e60930..564bf850 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,10 +1,10 @@ from model import Dataset, db, DatasetDate -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request +from flask_restx import Resource, fields +from flask import request +from apis.dataset_metadata_namespace import api -api = Namespace("date", description="dataset operations", path="/") dataset_date = api.model( "DatasetDate", diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index f1cbaa53..ca9011a1 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -1,9 +1,9 @@ from model import Dataset, DatasetDeIdentLevel, db -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields from flask import request -api = Namespace("date", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api de_ident_level = api.model( "DatasetDeIdentLevel", diff --git 
a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 1842470e..c6f8cb27 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -1,10 +1,10 @@ from model import Dataset, db, DatasetDescription -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields from flask import request -api = Namespace("description", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_description = api.model( "DatasetDescription", diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index c9f6538a..6c7f5189 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,9 +1,9 @@ from model import Dataset, DatasetFunder, db -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields from flask import request -api = Namespace("description", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_funder = api.model( "DatasetFunder", diff --git a/apis/dataset_metadata/dataset_identifier.py b/apis/dataset_metadata/dataset_identifier.py index ef589b0b..ad57901c 100644 --- a/apis/dataset_metadata/dataset_identifier.py +++ b/apis/dataset_metadata/dataset_identifier.py @@ -1,8 +1,8 @@ from model import Dataset, db, DatasetIdentifier -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields from flask import request -api = Namespace("identifier", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_identifier = api.model( "DatasetIdentifier", diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index b65c74e3..517d5874 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ 
b/apis/dataset_metadata/dataset_managing_organization.py @@ -1,10 +1,10 @@ from model import Dataset, db, DatasetManagingOrganization from flask import request -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields -api = Namespace("managing_organization", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api managing_organization = api.model( "DatasetManagingOrganization", diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index f9b41222..5ecf56d3 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("dataset_other", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_other = api.model( "DatasetOther", diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index cf9a74ca..849af863 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("readme", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_readme = api.model( "DatasetReadme", diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 5f95a243..e3e4971d 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -2,9 +2,7 @@ from flask_restx import Namespace, Resource, fields from flask import jsonify, request - - -api = Namespace("record_keys", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_record_keys = api.model( "DatasetRecordKeys", diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 
9712d344..56b08ed2 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -2,9 +2,7 @@ from flask_restx import Namespace, Resource, fields from flask import jsonify, request - - -api = Namespace("related_item", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_related_item = api.model( "DatasetRelatedItem", diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py index b3f1699c..d48860a8 100644 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields from flask import jsonify, request -api = Namespace("related_item_contributor", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py index 004f6be9..185fbcb5 100644 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -4,7 +4,7 @@ from flask import jsonify, request -api = Namespace("related_item", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py index d800cf45..c055fc26 100644 --- a/apis/dataset_metadata/dataset_related_item_other.py +++ b/apis/dataset_metadata/dataset_related_item_other.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields from flask import jsonify, request -api = Namespace("related_item_other", 
description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", diff --git a/apis/dataset_metadata/dataset_related_item_title.py b/apis/dataset_metadata/dataset_related_item_title.py index 427e1b7f..7cc2da80 100644 --- a/apis/dataset_metadata/dataset_related_item_title.py +++ b/apis/dataset_metadata/dataset_related_item_title.py @@ -1,10 +1,9 @@ from model import Dataset, DatasetRelatedItemTitle, db from flask_restx import Namespace, Resource, fields -from flask import jsonify, request -api = Namespace("related_item_title", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api # dataset_related_item_contributor = api.model( # "DatasetRelatedItemTitle", diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 261d1439..8e42db42 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields from flask import jsonify, request -api = Namespace("dataset_rights", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_rights = api.model( "DatasetRights", diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 131bde5e..004b8fc9 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields from flask import jsonify, request -api = Namespace("description", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_subject = api.model( "DatasetSubject", diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 4257c5de..5faf600f 100644 --- a/apis/dataset_metadata/dataset_title.py +++ 
b/apis/dataset_metadata/dataset_title.py @@ -4,7 +4,7 @@ from flask import jsonify, request -api = Namespace("title", description="dataset operations", path="/") +from apis.dataset_metadata_namespace import api dataset_title = api.model( "DatasetTitle", diff --git a/apis/dataset_metadata_namespace.py b/apis/dataset_metadata_namespace.py new file mode 100644 index 00000000..57aad026 --- /dev/null +++ b/apis/dataset_metadata_namespace.py @@ -0,0 +1,4 @@ +from flask_restx import Namespace + + +api = Namespace("Dataset Metadata", description="dataset operations", path="/") From 97101c06bbea45b0dee795f1d2ae6623ac353efb Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 25 Aug 2023 00:42:37 +0000 Subject: [PATCH 041/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 8 ++++++-- apis/dataset_metadata/dataset_date.py | 1 - 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index eb335775..0db0a90f 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -42,8 +42,12 @@ from .dataset_metadata.dataset_title import api as title from .dataset_metadata.dataset_related_item import api as related_item from .dataset_metadata.dataset_related_item_title import api as related_item_title -from .dataset_metadata.dataset_related_item_contributor import api as related_item_contributor -from .dataset_metadata.dataset_related_item_identifier import api as related_item_identifier +from .dataset_metadata.dataset_related_item_contributor import ( + api as related_item_contributor, +) +from .dataset_metadata.dataset_related_item_identifier import ( + api as related_item_identifier, +) from .dataset_metadata.dataset_related_item_other import api as related_item_other from .dataset_metadata.dataset_funder import api as funder diff --git a/apis/dataset_metadata/dataset_date.py 
b/apis/dataset_metadata/dataset_date.py index 564bf850..d2993545 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -5,7 +5,6 @@ from apis.dataset_metadata_namespace import api - dataset_date = api.model( "DatasetDate", { From 62bbd337adc79482c116558be0c9a96a6a8e69d2 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 24 Aug 2023 17:49:23 -0700 Subject: [PATCH 042/505] fix: replaced namespace in study metadata --- apis/study_metadata/study_arm.py | 2 +- apis/study_metadata/study_available_ipd.py | 2 +- apis/study_metadata/study_contact.py | 2 +- apis/study_metadata/study_description.py | 2 +- apis/study_metadata/study_design.py | 2 +- apis/study_metadata/study_eligibility.py | 2 +- apis/study_metadata/study_identification.py | 2 +- apis/study_metadata/study_intervention.py | 2 +- apis/study_metadata/study_ipdsharing.py | 2 +- apis/study_metadata/study_link.py | 2 +- apis/study_metadata/study_location.py | 2 +- apis/study_metadata/study_other.py | 2 +- apis/study_metadata/study_overall_official.py | 2 +- apis/study_metadata/study_reference.py | 2 +- apis/study_metadata/study_sponsors_collaborators.py | 2 +- apis/study_metadata/study_status.py | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 15fdc3bb..3ef312d0 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -2,7 +2,7 @@ from model import Study -api = Namespace("arm", description="study operations", path="/") +from apis.study_metadata_namespace import api study_arm = api.model( diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 70191530..54d27fd6 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import Study -api = Namespace("available_ipd", 
description="study operations", path="/") +from apis.study_metadata_namespace import api study_available = api.model( diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index c25d4ec7..70c643e2 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("study", description="study operations", path="/") +from apis.study_metadata_namespace import api study_contact = api.model( "StudyContact", diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 1b5a6271..c72f0d0c 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("description", description="study operations", path="/") +from apis.study_metadata_namespace import api study_description = api.model( diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index f2ee2694..17c5c828 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -2,7 +2,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("design", description="study operations", path="/") +from apis.study_metadata_namespace import api study_design = api.model( "StudyDesign", diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 79956be8..13079c33 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("eligibility", description="study operations", path="/") +from apis.study_metadata_namespace import api study_eligibility = api.model( diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index fa82369d..0d3c47b3 100644 
--- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("identification", description="study operations", path="/") +from apis.study_metadata_namespace import api study_identification = api.model( diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index a03658f5..e70eb5c9 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("intervention", description="study operations", path="/") +from apis.study_metadata_namespace import api study_intervention = api.model( diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index d4557ba8..9c7cc2b0 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("ipdsharing", description="study operations", path="/") +from apis.study_metadata_namespace import api study_ipdsharing = api.model( diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 53f25982..de4e5cf5 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("link", description="study operations", path="/") +from apis.study_metadata_namespace import api study_link = api.model( diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index eb6a0f7b..272b19f0 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -3,7 +3,7 @@ from flask_restx import Namespace, Resource, fields -api = Namespace("location", description="study operations", path="/") +from apis.study_metadata_namespace 
import api study_location = api.model( diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 9bc633e1..6ade53c8 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import Study -api = Namespace("other", description="study operations", path="/") +from apis.study_metadata_namespace import api study_other = api.model( diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index efe11efe..73cfdf90 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import Study -api = Namespace("overall_official", description="study operations", path="/") +from apis.study_metadata_namespace import api study_overall_official = api.model( diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index efc846f2..f2f40298 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import Study -api = Namespace("reference", description="study operations", path="/") +from apis.study_metadata_namespace import api study_reference = api.model( diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index c5875126..f5f8cc29 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import Study -api = Namespace("sponsors_collaborators", description="study operations", path="/") +from apis.study_metadata_namespace import api study_sponsors_collaborators = api.model( diff --git 
a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 79d53d2d..c325d9c2 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import Study -api = Namespace("status", description="study operations", path="/") +from apis.study_metadata_namespace import api study_status = api.model( From 4dbb287ef8ca556bb7e223fc6effa1ae01baa41b Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 25 Aug 2023 14:04:02 -0700 Subject: [PATCH 043/505] fix: modified PUT POST for one-t-one study metadata --- apis/study_metadata/study_description.py | 20 ++++++++++++++++-- apis/study_metadata/study_design.py | 21 +++++++++++++++++-- apis/study_metadata/study_eligibility.py | 21 +++++++++++++++++-- apis/study_metadata/study_ipdsharing.py | 20 ++++++++++++++++-- apis/study_metadata/study_other.py | 19 ++++++++++++++++- .../study_sponsors_collaborators.py | 21 ++++++++++++++++++- apis/study_metadata/study_status.py | 20 +++++++++++++++++- model/study_metadata/study_description.py | 8 +++---- model/study_metadata/study_design.py | 8 +++---- model/study_metadata/study_eligibility.py | 8 +++---- model/study_metadata/study_ipdsharing.py | 8 +++---- model/study_metadata/study_other.py | 7 ++++--- .../study_sponsors_collaborators.py | 7 ++++--- model/study_metadata/study_status.py | 19 +++++++++-------- 14 files changed, 165 insertions(+), 42 deletions(-) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index c72f0d0c..04096b32 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -1,6 +1,6 @@ -from model import Study - from flask_restx import Namespace, Resource, fields +from model import Study, db, StudyDescription +from flask import request from apis.study_metadata_namespace import api @@ -27,3 +27,19 @@ def get(self, study_id: int): study_ = 
Study.query.get(study_id) study_description_ = study_.study_description return [s.to_dict() for s in study_description_] + + def post(self, study_id: int): + data = request.json + study_description_ = Study.query.get(study_id) + study_description_ = StudyDescription.from_data(study_description_, data) + db.session.add(study_description_) + db.session.commit() + return study_description_.to_dict() + + @api.route("/study//metadata/description/") + class StudyDescriptionUpdate(Resource): + def put(self, study_id: int, description_id: int): + study_description_ = StudyDescription.query.get(description_id) + study_description_.update(request.json) + db.session.commit() + return study_description_.to_dict() diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 17c5c828..b0bf4dbb 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -1,6 +1,7 @@ -from model import Study - from flask_restx import Namespace, Resource, fields +from model import Study, db, StudyDesign +from flask import request + from apis.study_metadata_namespace import api @@ -41,3 +42,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_design_ = study_.study_design return [s.to_dict() for s in study_design_] + + def post(self, study_id: int): + data = request.json + study_design_ = Study.query.get(study_id) + study_design_ = StudyDesign.from_data(study_design_, data) + db.session.add(study_design_) + db.session.commit() + return study_design_.to_dict() + + @api.route("/study//metadata/design/") + class StudyDesignUpdate(Resource): + def put(self, study_id: int, design_id: int): + study_design_ = StudyDesign.query.get(design_id) + study_design_.update(request.json) + db.session.commit() + return study_design_.to_dict() diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 13079c33..1ea6ea71 100644 --- a/apis/study_metadata/study_eligibility.py +++ 
b/apis/study_metadata/study_eligibility.py @@ -1,6 +1,7 @@ -from model import Study +from flask_restx import Resource, fields +from model import Study, db, StudyEligibility +from flask import request -from flask_restx import Namespace, Resource, fields from apis.study_metadata_namespace import api @@ -35,3 +36,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_eligibility_ = study_.study_eligibility return [s.to_dict() for s in study_eligibility_] + + def post(self, study_id: int): + data = request.json + study_eligibility_ = Study.query.get(study_id) + study_eligibility_ = StudyEligibility.from_data(study_eligibility_, data) + db.session.add(study_eligibility_) + db.session.commit() + return study_eligibility_.to_dict() + + @api.route("/study//metadata/eligibility/") + class StudyArmUpdate(Resource): + def put(self, study_id: int, eligibility_id: int): + study_eligibility_ = StudyEligibility.query.get(eligibility_id) + study_eligibility_.update(request.json) + db.session.commit() + return study_eligibility_.to_dict() diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 9c7cc2b0..d0c7a568 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -1,6 +1,6 @@ -from model import Study - from flask_restx import Namespace, Resource, fields +from model import Study, db, StudyIpdsharing +from flask import request from apis.study_metadata_namespace import api @@ -31,3 +31,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_ipdsharing_ = study_.study_ipdsharing return [s.to_dict() for s in study_ipdsharing_] + + def post(self, study_id: int): + data = request.json + study_ipdsharing_ = Study.query.get(study_id) + study_ipdsharing_ = StudyIpdsharing.from_data(study_ipdsharing_, data) + db.session.add(study_ipdsharing_) + db.session.commit() + return study_ipdsharing_.to_dict() + + @api.route("/study//metadata/ipdsharing/") + class 
StudyIpdsharingUpdate(Resource): + def put(self, study_id: int, study_ipdsharing_id: int): + study_ipdsharing_ = StudyIpdsharing.query.get(study_ipdsharing_id) + study_ipdsharing_.update(request.json) + db.session.commit() + return study_ipdsharing_.to_dict() diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 6ade53c8..26cb0dde 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -1,5 +1,6 @@ from flask_restx import Namespace, Resource, fields -from model import Study +from model import Study, db, StudyOther +from flask import request from apis.study_metadata_namespace import api @@ -27,3 +28,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_other_ = study_.study_other return [s.to_dict() for s in study_other_] + + def post(self, study_id: int): + data = request.json + study_other_ = Study.query.get(study_id) + study_other_ = StudyOther.from_data(study_other_, data) + db.session.add(study_other_) + db.session.commit() + return study_other_.to_dict() + + @api.route("/study//metadata/other/") + class StudyOtherUpdate(Resource): + def put(self, study_id: int, other_id: int): + study_other_ = StudyOther.query.get(other_id) + study_other_.update(request.json) + db.session.commit() + return study_other_.to_dict() diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index f5f8cc29..7d3b67d7 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -1,5 +1,8 @@ from flask_restx import Namespace, Resource, fields -from model import Study +from model import Study, db, StudySponsorsCollaborators +from flask import request + + from apis.study_metadata_namespace import api @@ -31,3 +34,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_sponsors_collaborators_ = study_.study_sponsors_collaborators return [s.to_dict() 
for s in study_sponsors_collaborators_] + + def post(self, study_id: int): + data = request.json + study_sponsors_collaborators_ = Study.query.get(study_id) + study_sponsors_collaborators_ = StudySponsorsCollaborators.from_data(study_sponsors_collaborators_, data) + db.session.add(study_sponsors_collaborators_) + db.session.commit() + return study_sponsors_collaborators_.to_dict() + + @api.route("/study//metadata/sponsors_collaborators/") + class StudySponsorsCollaboratorsUpdate(Resource): + def put(self, study_id: int, sponsors_collaborators_id: int): + study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get(sponsors_collaborators_id) + study_sponsors_collaborators_.update(request.json) + db.session.commit() + return study_sponsors_collaborators_.to_dict() diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index c325d9c2..b321e310 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -1,5 +1,7 @@ from flask_restx import Namespace, Resource, fields -from model import Study +from model import Study, db, StudyStatus +from flask import request + from apis.study_metadata_namespace import api @@ -29,3 +31,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_status_ = study_.study_status return [s.to_dict() for s in study_status_] + + def post(self, study_id: int): + data = request.json + study_status_ = Study.query.get(study_id) + study_status_ = StudyStatus.from_data(study_status_, data) + db.session.add(study_status_) + db.session.commit() + return study_status_.to_dict() + + @api.route("/study//metadata/status/") + class StudyStatusUpdate(Resource): + def put(self, study_id: int, study_status_id: int): + study_status_ = StudyStatus.query.get(study_status_id) + study_status_.update(request.json) + db.session.commit() + return study_status_.to_dict() diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index 
9fbd47b8..f61c986b 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -5,9 +5,9 @@ class StudyDescription(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_description" id = db.Column(db.CHAR(36), primary_key=True) @@ -26,9 +26,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_description = StudyDescription() + study_description = StudyDescription(study) study_description.update(data) return study_description diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 59cdb7a6..bd7eafb8 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -8,9 +8,9 @@ class StudyDesign(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_design" id = db.Column(db.CHAR(36), primary_key=True) @@ -63,9 +63,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_design = StudyDesign() + study_design = StudyDesign(study) study_design.update(data) return study_design diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 50888e84..bbefe1b7 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -7,9 +7,9 @@ class StudyEligibility(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_eligibility" id = db.Column(db.CHAR(36), 
primary_key=True) @@ -44,9 +44,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_eligibility = StudyEligibility() + study_eligibility = StudyEligibility(study) study_eligibility.update(data) return study_eligibility diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 914c5670..83811389 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -7,9 +7,9 @@ class StudyIpdsharing(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_ipdsharing" id = db.Column(db.CHAR(36), primary_key=True) @@ -36,9 +36,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_ipdsharing = StudyIpdsharing() + study_ipdsharing = StudyIpdsharing(study) study_ipdsharing.update(data) return study_ipdsharing diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 04c3b04b..fee797a8 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -8,8 +8,9 @@ class StudyOther(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) + self.study = study __tablename__ = "study_other" @@ -33,9 +34,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_other = StudyOther() + study_other = StudyOther(study) study_other.update(data) return study_other diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 52ec4a54..f3c5a24e 100644 --- 
a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -7,8 +7,9 @@ class StudySponsorsCollaborators(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) + self.study = study __tablename__ = "study_sponsors_collaborators" @@ -40,9 +41,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_sponsors_collaborators = StudySponsorsCollaborators() + study_sponsors_collaborators = StudySponsorsCollaborators(study) study_sponsors_collaborators.update(data) return study_sponsors_collaborators diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 5d4065e9..fb83da35 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -6,9 +6,10 @@ class StudyStatus(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) # self.created_at = datetime.now() + self.study = study __tablename__ = "study_status" @@ -36,21 +37,21 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_status = StudyStatus() + study_status = StudyStatus(study) study_status.update(data) return study_status def update(self, data): """Updates the study from a dictionary""" - self.overall_status = data["title"] - self.why_stopped = data["image"] - self.start_date = data["created_at"] - self.start_date_type = data["updated_on"] - self.completion_date = data["title"] - self.completion_date_type = data["image"] + self.overall_status = data["overall_status"] + self.why_stopped = data["why_stopped"] + self.start_date = data["start_date"] + self.start_date_type = data["start_date_type"] + 
self.completion_date = data["completion_date"] + self.completion_date_type = data["completion_date_type"] def validate(self): """Validates the study""" From 60ebf5f140e3e46324bb4f7096844e7730a7b91f Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 25 Aug 2023 14:58:37 -0700 Subject: [PATCH 044/505] fix: modified PUT POST for study metadata --- apis/study_metadata/study_arm.py | 21 ++++++++++++++-- apis/study_metadata/study_available_ipd.py | 24 +++++++++++++++---- apis/study_metadata/study_contact.py | 22 +++++++++++++---- apis/study_metadata/study_identification.py | 20 ++++++++++++++-- apis/study_metadata/study_intervention.py | 21 ++++++++++++++-- apis/study_metadata/study_link.py | 20 ++++++++++++++-- apis/study_metadata/study_location.py | 21 ++++++++++++++-- apis/study_metadata/study_overall_official.py | 20 +++++++++++++++- apis/study_metadata/study_reference.py | 21 +++++++++++++++- model/study_metadata/study_arm.py | 9 ++++--- model/study_metadata/study_available_ipd.py | 8 +++---- model/study_metadata/study_contact.py | 8 +++---- model/study_metadata/study_identification.py | 8 +++---- model/study_metadata/study_intervention.py | 8 +++---- model/study_metadata/study_link.py | 8 +++---- model/study_metadata/study_location.py | 7 +++--- .../study_metadata/study_overall_official.py | 7 +++--- model/study_metadata/study_reference.py | 7 +++--- 18 files changed, 206 insertions(+), 54 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 3ef312d0..0cf1d38c 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,5 +1,6 @@ from flask_restx import Namespace, Resource, fields -from model import Study +from model import Study, db, StudyArm +from flask import request from apis.study_metadata_namespace import api @@ -18,7 +19,7 @@ @api.route("/study//metadata/arm") -class StudyArm(Resource): +class StudyArmResource(Resource): @api.doc("list_study") @api.response(200, "Success") 
@api.response(400, "Validation Error") @@ -28,3 +29,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_arm_ = study_.study_arm return [s.to_dict() for s in study_arm_] + + def post(self, study_id: int): + data = request.json + study_obj = Study.query.get(study_id) + study_arm_ = StudyArm.from_data(study_obj, data) + db.session.add(study_arm_) + db.session.commit() + return study_arm_.to_dict() + # + # @api.route("/study//metadata/arm/") + # class StudyArmUpdate(Resource): + # def put(self, study_id: int, arm_id: int): + # study_arm_ = StudyArm.query.get(arm_id) + # study_arm_.update(request.json) + # db.session.commit() + # return study_arm_.to_dict() diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 54d27fd6..50203842 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,9 +1,9 @@ from flask_restx import Namespace, Resource, fields -from model import Study +from model import Study, db, StudyAvailableIpd +from flask import request from apis.study_metadata_namespace import api - study_available = api.model( "StudyAvailable", { @@ -25,5 +25,21 @@ class StudyAvailableResource(Resource): @api.marshal_with(study_available) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_available_ = study_.study_available_ipd - return [s.to_dict() for s in study_available_] + study_available_ipd = study_.study_available_ipd + return [s.to_dict() for s in study_available_ipd] + + def post(self, study_id: int): + data = request.json + study_available_ipd_ = Study.query.get(study_id) + study_available_ipd_ = StudyAvailableIpd.from_data(study_available_ipd_, data) + db.session.add(study_available_ipd_) + db.session.commit() + return study_available_ipd_.to_dict() + + # @api.route("/study//metadata/available_ipd/") + # class StudyAvailableIpdUpdate(Resource): + # def put(self, study_id: int, available_ipd_id: int): + # 
study_available_ipd_ = StudyAvailableIpd.query.get(available_ipd_id) + # study_available_ipd_.update(request.json) + # db.session.commit() + # return study_available_ipd_.to_dict() diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 70c643e2..ad65da84 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -1,8 +1,6 @@ -from model import Study - from flask_restx import Namespace, Resource, fields - - +from model import Study, db, StudyContact +from flask import request from apis.study_metadata_namespace import api study_contact = api.model( @@ -32,3 +30,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_contact_ = study_.study_contact return [s.to_dict() for s in study_contact_] + + def post(self, study_id: int): + data = request.json + study_contact_ = Study.query.get(study_id) + study_contact_ = StudyContact.from_data(study_contact_, data) + db.session.add(study_contact_) + db.session.commit() + return study_contact_.to_dict() + + # @api.route("/study//metadata/arm/") + # class StudyArmUpdate(Resource): + # def put(self, study_id: int, arm_id: int): + # study_arm_ = StudyContact.query.get(arm_id) + # study_arm_.update(request.json) + # db.session.commit() + # return study_arm_.to_dict() diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 0d3c47b3..46706a63 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,6 +1,6 @@ -from model import Study - from flask_restx import Namespace, Resource, fields +from model import Study, db, StudyIdentification +from flask import request from apis.study_metadata_namespace import api @@ -30,3 +30,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_identification_ = study_.study_identification return [s.to_dict() for s in study_identification_] + + def post(self, study_id: int): + data 
= request.json + study_identification_ = Study.query.get(study_id) + study_identification_ = StudyIdentification.from_data(study_identification_, data) + db.session.add(study_identification_) + db.session.commit() + return study_identification_.to_dict() + + # @api.route("/study//metadata/available_ipd/") + # class StudyIdentificationdUpdate(Resource): + # def put(self, study_id: int, available_ipd_id: int): + # study_available_ipd_ = StudyIdentification.query.get(available_ipd_id) + # study_available_ipd_.update(request.json) + # db.session.commit() + # return study_available_ipd_.to_dict() diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index e70eb5c9..389b8d15 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -1,6 +1,7 @@ -from model import Study +from flask_restx import Resource, fields +from model import Study, db, StudyIntervention +from flask import request -from flask_restx import Namespace, Resource, fields from apis.study_metadata_namespace import api @@ -30,3 +31,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_intervention_ = study_.study_intervention return [s.to_dict() for s in study_intervention_] + + def post(self, study_id: int): + data = request.json + study_intervention_ = Study.query.get(study_id) + study_intervention_ = StudyIntervention.from_data(study_intervention_, data) + db.session.add(study_intervention_) + db.session.commit() + return study_intervention_.to_dict() + + # @api.route("/study//metadata/available_ipd/") + # class StudyInterventionUpdate(Resource): + # def put(self, study_id: int, available_ipd_id: int): + # study_intervention_ = StudyIntervention.query.get(study_intervention_) + # study_intervention_.update(request.json) + # db.session.commit() + # return study_intervention_.to_dict() diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index de4e5cf5..70405b72 
100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -1,6 +1,6 @@ -from model import Study - from flask_restx import Namespace, Resource, fields +from model import Study, db, StudyLink +from flask import request from apis.study_metadata_namespace import api @@ -27,3 +27,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_link_ = study_.study_link return [s.to_dict() for s in study_link_] + + def post(self, study_id: int): + data = request.json + study_link_ = Study.query.get(study_id) + study_link_ = StudyLink.from_data(study_link_, data) + db.session.add(study_link_) + db.session.commit() + return study_link_.to_dict() + + # @api.route("/study//metadata/available_ipd/") + # class StudyLinkUpdate(Resource): + # def put(self, study_id: int, available_ipd_id: int): + # study_link_ = StudyLink.query.get(study_link_) + # study_link_.update(request.json) + # db.session.commit() + # return study_intervention_.to_dict() diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 272b19f0..8c9c31f3 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -1,6 +1,7 @@ -from model import Study +from flask_restx import Resource, fields +from model import Study, db, StudyLocation +from flask import request -from flask_restx import Namespace, Resource, fields from apis.study_metadata_namespace import api @@ -31,3 +32,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_location_ = study_.study_location return [s.to_dict() for s in study_location_] + + def post(self, study_id: int): + data = request.json + study_location_ = Study.query.get(study_id) + study_location_ = StudyLocation.from_data(study_location_, data) + db.session.add(study_location_) + db.session.commit() + return study_location_.to_dict() + + # @api.route("/study//metadata/available_ipd/") + # class StudyLocationUpdate(Resource): + # def put(self, 
study_id: int, available_ipd_id: int): + # study_location_ = StudyLocation.query.get(study_location_) + # study_location_.update(request.json) + # db.session.commit() + # return study_location_.to_dict() diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 73cfdf90..65f07271 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,5 +1,7 @@ from flask_restx import Namespace, Resource, fields -from model import Study +from model import Study, db, StudyOverallOfficial +from flask import request + from apis.study_metadata_namespace import api @@ -27,3 +29,19 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_overall_official_ = study_.study_overall_official return [s.to_dict() for s in study_overall_official_] + + def post(self, study_id: int): + data = request.json + study_overall_official_ = Study.query.get(study_id) + study_overall_official_ = StudyOverallOfficial.from_data(study_overall_official_, data) + db.session.add(study_overall_official_) + db.session.commit() + return study_overall_official_.to_dict() + + # @api.route("/study//metadata/available_ipd/") + # class StudyOverallOfficialUpdate(Resource): + # def put(self, study_id: int, available_ipd_id: int): + # study_overall_official_ = StudyOverallOfficial.query.get(study_overall_official_) + # study_overall_official_.update(request.json) + # db.session.commit() + # return study_overall_official_.to_dict() diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index f2f40298..73bfd970 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,5 +1,8 @@ from flask_restx import Namespace, Resource, fields -from model import Study +from model import Study, db, StudyReference +from flask import request + + from apis.study_metadata_namespace import api @@ -27,3 +30,19 @@ def get(self, 
study_id: int): study_ = Study.query.get(study_id) study_reference_ = study_.study_reference return [s.to_dict() for s in study_reference_] + + def post(self, study_id: int): + data = request.json + study_reference_ = Study.query.get(study_id) + study_reference_ = StudyReference.from_data(study_reference_, data) + db.session.add(study_reference_) + db.session.commit() + return study_reference_.to_dict() + + # @api.route("/study//metadata/available_ipd/") + # class StudyReferenceUpdate(Resource): + # def put(self, study_id: int, available_ipd_id: int): + # study_location_ = StudyReference.query.get(study_location_) + # study_location_.update(request.json) + # db.session.commit() + # return study_location_.to_dict() diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index a81d5c97..010a251a 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -8,9 +8,9 @@ class StudyArm(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_arm" id = db.Column(db.CHAR(36), primary_key=True) @@ -33,11 +33,10 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_arm = StudyArm() + study_arm = StudyArm(study) study_arm.update(data) - return study_arm def update(self, data): diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index a56abadf..72fca85b 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -6,9 +6,9 @@ class StudyAvailableIpd(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_available_ipd" id = db.Column(db.CHAR(36), 
primary_key=True) @@ -31,9 +31,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_available = StudyAvailableIpd() + study_available = StudyAvailableIpd(study) study_available.update(data) return study_available diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index de4116fe..20d87091 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -5,9 +5,9 @@ class StudyContact(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_contact" id = db.Column(db.CHAR(36), primary_key=True) @@ -38,9 +38,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_contact = StudyContact() + study_contact = StudyContact(study) study_contact.update(data) return study_contact diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 11b5c1ab..f73e2f39 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -5,9 +5,9 @@ class StudyIdentification(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_identification" id = db.Column(db.CHAR(36), primary_key=True) @@ -32,9 +32,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_identification = StudyIdentification() + study_identification = StudyIdentification(study) study_identification.update(data) return study_identification diff --git 
a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index d20d270b..c45fef11 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -7,9 +7,9 @@ class StudyIntervention(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) - + self.study = study __tablename__ = "study_intervention" id = db.Column(db.CHAR(36), primary_key=True) @@ -34,9 +34,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_intervention = StudyIntervention() + study_intervention = StudyIntervention(study) study_intervention.update(data) return study_intervention diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 44d379cb..5c9a4613 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -5,8 +5,9 @@ class StudyLink(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) + self.study = study __tablename__ = "study_link" @@ -22,11 +23,10 @@ def to_dict(self): return {"id": self.id, "url": self.url, "title": self.title} @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_link = StudyLink() + study_link = StudyLink(study) study_link.update(data) - return study_link def update(self, data): diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 88bd7915..1a954383 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -5,8 +5,9 @@ class StudyLocation(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = 
str(uuid.uuid4()) + self.study = study __tablename__ = "study_location" @@ -34,9 +35,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_location = StudyLocation() + study_location = StudyLocation(study) study_location.update(data) return study_location diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index b1140d45..a8d3d12d 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -5,8 +5,9 @@ class StudyOverallOfficial(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) + self.study = study __tablename__ = "study_overall_official" @@ -30,9 +31,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_overall_official = StudyOverallOfficial() + study_overall_official = StudyOverallOfficial(study) study_overall_official.update(data) return study_overall_official diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index afa685b0..8ef08eac 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -6,8 +6,9 @@ class StudyReference(db.Model): """A study is a collection of datasets and participants""" - def __init__(self): + def __init__(self, study): self.id = str(uuid.uuid4()) + self.study = study __tablename__ = "study_reference" @@ -31,9 +32,9 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): + def from_data(study, data: dict): """Creates a new study from a dictionary""" - study_reference = StudyReference() + study_reference = StudyReference(study) study_reference.update(data) return study_reference From 7e07fc9294c06c98b14244c462708866da6759be Mon Sep 
17 00:00:00 2001 From: aydawka Date: Sun, 27 Aug 2023 15:21:34 -0700 Subject: [PATCH 045/505] feat: added upsert logic to study metadata (1-to-1) --- apis/study_metadata/study_arm.py | 21 ++++++++++++------- apis/study_metadata/study_available_ipd.py | 16 ++++++++++---- apis/study_metadata/study_contact.py | 17 ++++++++++----- apis/study_metadata/study_identification.py | 16 ++++++++++---- apis/study_metadata/study_intervention.py | 18 ++++++++++++---- apis/study_metadata/study_link.py | 17 ++++++++++----- apis/study_metadata/study_location.py | 17 +++++++++++---- apis/study_metadata/study_overall_official.py | 17 ++++++++++----- apis/study_metadata/study_reference.py | 17 ++++++++++----- model/study_metadata/study_arm.py | 2 +- 10 files changed, 114 insertions(+), 44 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 0cf1d38c..cd89cc76 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,4 +1,4 @@ -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields from model import Study, db, StudyArm from flask import request @@ -20,10 +20,8 @@ @api.route("/study//metadata/arm") class StudyArmResource(Resource): - @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") @api.marshal_with(study_arm) def get(self, study_id: int): study_ = Study.query.get(study_id) @@ -33,11 +31,20 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json study_obj = Study.query.get(study_id) - study_arm_ = StudyArm.from_data(study_obj, data) - db.session.add(study_arm_) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_arm_ = StudyArm.query.get(i["id"]) + study_arm_.update(i) + list_of_elements.append(study_arm_.to_dict()) + elif "id" not in i or not i["id"]: + study_arm_ = StudyArm.from_data(study_obj, i) + db.session.add(study_arm_) + 
list_of_elements.append(study_arm_.to_dict()) db.session.commit() - return study_arm_.to_dict() - # + + return list_of_elements + # @api.route("/study//metadata/arm/") # class StudyArmUpdate(Resource): # def put(self, study_id: int, arm_id: int): diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 50203842..7740ab20 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -30,11 +30,19 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_available_ipd_ = Study.query.get(study_id) - study_available_ipd_ = StudyAvailableIpd.from_data(study_available_ipd_, data) - db.session.add(study_available_ipd_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_available_ipd_ = StudyAvailableIpd.query.get(i["id"]) + study_available_ipd_.update(i) + list_of_elements.append(study_available_ipd_.to_dict()) + elif "id" not in i or not i["id"]: + study_available_ipd_ = StudyAvailableIpd.from_data(study_obj, i) + db.session.add(study_available_ipd_) + list_of_elements.append(study_available_ipd_.to_dict()) db.session.commit() - return study_available_ipd_.to_dict() + return list_of_elements # @api.route("/study//metadata/available_ipd/") # class StudyAvailableIpdUpdate(Resource): diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index ad65da84..46159d84 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -33,12 +33,19 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_contact_ = Study.query.get(study_id) - study_contact_ = StudyContact.from_data(study_contact_, data) - db.session.add(study_contact_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_contact_ = StudyContact.query.get(i["id"]) 
+ study_contact_.update(i) + list_of_elements.append(study_contact_.to_dict()) + elif "id" not in i or not i["id"]: + study_contact_ = StudyContact.from_data(study_obj, i) + db.session.add(study_contact_) + list_of_elements.append(study_contact_.to_dict()) db.session.commit() - return study_contact_.to_dict() - + return list_of_elements # @api.route("/study//metadata/arm/") # class StudyArmUpdate(Resource): # def put(self, study_id: int, arm_id: int): diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 46706a63..2966c44c 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -33,11 +33,19 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_identification_ = Study.query.get(study_id) - study_identification_ = StudyIdentification.from_data(study_identification_, data) - db.session.add(study_identification_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_identification_ = StudyIdentification.query.get(i["id"]) + study_identification_.update(i) + list_of_elements.append(study_identification_.to_dict()) + elif "id" not in i or not i["id"]: + study_identification_ = StudyIdentification.from_data(study_obj, i) + db.session.add(study_identification_) + list_of_elements.append(study_identification_.to_dict()) db.session.commit() - return study_identification_.to_dict() + return list_of_elements # @api.route("/study//metadata/available_ipd/") # class StudyIdentificationdUpdate(Resource): diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 389b8d15..085d7182 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -34,11 +34,21 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_intervention_ = 
Study.query.get(study_id) - study_intervention_ = StudyIntervention.from_data(study_intervention_, data) - db.session.add(study_intervention_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_intervention_ = StudyIntervention.query.get(i["id"]) + study_intervention_.update(i) + list_of_elements.append(study_intervention_.to_dict()) + elif "id" not in i or not i["id"]: + study_intervention_ = StudyIntervention.from_data(study_obj, i) + db.session.add(study_intervention_) + list_of_elements.append(study_intervention_.to_dict()) db.session.commit() - return study_intervention_.to_dict() + + return list_of_elements + # @api.route("/study//metadata/available_ipd/") # class StudyInterventionUpdate(Resource): diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 70405b72..8118ab4e 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -30,12 +30,19 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_link_ = Study.query.get(study_id) - study_link_ = StudyLink.from_data(study_link_, data) - db.session.add(study_link_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_link_ = StudyLink.query.get(i["id"]) + study_link_.update(i) + list_of_elements.append(study_link_.to_dict()) + elif "id" not in i or not i["id"]: + study_link_ = StudyLink.from_data(study_obj, i) + db.session.add(study_link_) + list_of_elements.append(study_link_.to_dict()) db.session.commit() - return study_link_.to_dict() - + return list_of_elements # @api.route("/study//metadata/available_ipd/") # class StudyLinkUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 8c9c31f3..f12032aa 100644 --- a/apis/study_metadata/study_location.py +++ 
b/apis/study_metadata/study_location.py @@ -35,11 +35,20 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_location_ = Study.query.get(study_id) - study_location_ = StudyLocation.from_data(study_location_, data) - db.session.add(study_location_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_location_ = StudyLocation.query.get(i["id"]) + study_location_.update(i) + list_of_elements.append(study_location_.to_dict()) + elif "id" not in i or not i["id"]: + study_location_ = StudyLocation.from_data(study_obj, i) + db.session.add(study_location_) + list_of_elements.append(study_location_.to_dict()) db.session.commit() - return study_location_.to_dict() + return list_of_elements + # @api.route("/study//metadata/available_ipd/") # class StudyLocationUpdate(Resource): diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 65f07271..88580444 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -32,12 +32,19 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_overall_official_ = Study.query.get(study_id) - study_overall_official_ = StudyOverallOfficial.from_data(study_overall_official_, data) - db.session.add(study_overall_official_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_overall_official_ = StudyOverallOfficial.query.get(i["id"]) + study_overall_official_.update(i) + list_of_elements.append(study_overall_official_.to_dict()) + elif "id" not in i or not i["id"]: + study_overall_official_ = StudyOverallOfficial.from_data(study_obj, i) + db.session.add(study_overall_official_) + list_of_elements.append(study_overall_official_.to_dict()) db.session.commit() - return study_overall_official_.to_dict() - + return list_of_elements # 
@api.route("/study//metadata/available_ipd/") # class StudyOverallOfficialUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 73bfd970..1f76d789 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -33,12 +33,19 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json - study_reference_ = Study.query.get(study_id) - study_reference_ = StudyReference.from_data(study_reference_, data) - db.session.add(study_reference_) + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + study_reference_ = StudyReference.query.get(i["id"]) + study_reference_.update(i) + list_of_elements.append(study_reference_.to_dict()) + elif "id" not in i or not i["id"]: + study_reference_ = StudyReference.from_data(study_obj, i) + db.session.add(study_reference_) + list_of_elements.append(study_reference_.to_dict()) db.session.commit() - return study_reference_.to_dict() - + return list_of_elements # @api.route("/study//metadata/available_ipd/") # class StudyReferenceUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 010a251a..55828962 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -33,7 +33,7 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study, data): """Creates a new study from a dictionary""" study_arm = StudyArm(study) study_arm.update(data) From 1b2bcd4314f8cb78695d1816d8478eaa13dea977 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 27 Aug 2023 15:33:03 -0700 Subject: [PATCH 046/505] fix: restx api param names --- apis/dataset_metadata/dataset_access.py | 7 +++---- apis/dataset_metadata/dataset_consent.py | 2 +- 
apis/dataset_metadata/dataset_date.py | 2 +- apis/dataset_metadata/dataset_de_ident_level.py | 2 +- apis/dataset_metadata/dataset_description.py | 2 +- apis/dataset_metadata/dataset_funder.py | 2 +- apis/dataset_metadata/dataset_identifier.py | 2 +- apis/dataset_metadata/dataset_managing_organization.py | 2 +- apis/dataset_metadata/dataset_other.py | 2 +- apis/dataset_metadata/dataset_readme.py | 2 +- apis/dataset_metadata/dataset_record_keys.py | 2 +- apis/dataset_metadata/dataset_related_item.py | 2 +- apis/dataset_metadata/dataset_related_item_contributor.py | 2 +- apis/dataset_metadata/dataset_related_item_identifier.py | 2 +- apis/dataset_metadata/dataset_related_item_other.py | 2 +- apis/dataset_metadata/dataset_related_item_title.py | 2 +- apis/dataset_metadata/dataset_rights.py | 2 +- apis/dataset_metadata/dataset_subject.py | 2 +- apis/dataset_metadata/dataset_title.py | 2 +- apis/study_metadata/study_available_ipd.py | 2 +- apis/study_metadata/study_contact.py | 2 +- apis/study_metadata/study_description.py | 2 +- apis/study_metadata/study_design.py | 2 +- apis/study_metadata/study_eligibility.py | 2 +- apis/study_metadata/study_identification.py | 2 +- apis/study_metadata/study_intervention.py | 2 +- apis/study_metadata/study_ipdsharing.py | 2 +- apis/study_metadata/study_link.py | 2 +- apis/study_metadata/study_location.py | 2 +- apis/study_metadata/study_other.py | 2 +- apis/study_metadata/study_overall_official.py | 2 +- apis/study_metadata/study_reference.py | 2 +- apis/study_metadata/study_sponsors_collaborators.py | 2 +- apis/study_metadata/study_status.py | 2 +- 34 files changed, 36 insertions(+), 37 deletions(-) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index debb77ec..2286876a 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -1,7 +1,7 @@ from model import Dataset, DatasetAccess, db -from flask_restx import Namespace, Resource, fields -from 
flask import jsonify, request +from flask_restx import Resource, fields +from flask import request from apis.dataset_metadata_namespace import api @@ -20,10 +20,9 @@ @api.route("/study//dataset//metadata/access") class DatasetAccessResource(Resource): - @api.doc("dataset") + @api.doc("access") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_access) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index aab860f6..e09ce8d7 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -22,7 +22,7 @@ @api.route("/study//dataset//metadata/consent") class DatasetConsentResource(Resource): - @api.doc("dataset") + @api.doc("consent") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index d2993545..e434917a 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -18,7 +18,7 @@ @api.route("/study//dataset//metadata/date") class DatasetDateResource(Resource): - @api.doc("dataset") + @api.doc("date") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 287bb899..91f04632 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -22,7 +22,7 @@ @api.route("/study//dataset//metadata/de_ident_level") class DatasetDeIdentLevelResource(Resource): - @api.doc("dataset") + @api.doc("de_ident_level") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The 
dataset identifier") diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index a0432f65..1425cba1 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -18,7 +18,7 @@ @api.route("/study//dataset//metadata/description") class DatasetDescriptionResource(Resource): - @api.doc("dataset") + @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 6c7f5189..d3e44e79 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -22,7 +22,7 @@ @api.route("/study//dataset//metadata/funder") class DatasetFunderResource(Resource): - @api.doc("dataset") + @api.doc("funder") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_identifier.py b/apis/dataset_metadata/dataset_identifier.py index 83dec166..2f2780b2 100644 --- a/apis/dataset_metadata/dataset_identifier.py +++ b/apis/dataset_metadata/dataset_identifier.py @@ -17,7 +17,7 @@ @api.route("/study//dataset//metadata/identifier") class DatasetIdentifierResource(Resource): - @api.doc("dataset") + @api.doc("identifier") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 0ea45fa1..e3f9cf42 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -18,7 +18,7 @@ @api.route("/study//dataset//metadata/managing_organization") class DatasetManagingOrganizationResource(Resource): - @api.doc("dataset") + @api.doc("managing_organization") 
@api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 5ecf56d3..5e370182 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -21,7 +21,7 @@ @api.route("/study//dataset//metadata/other") class DatasetOtherResource(Resource): - @api.doc("dataset") + @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index be4966e8..9b077ace 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -13,7 +13,7 @@ @api.route("/study//dataset//metadata/readme") class DatasetReadmeResource(Resource): - @api.doc("dataset") + @api.doc("readme") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index e3e4971d..c7c7f645 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -16,7 +16,7 @@ @api.route("/study//dataset//metadata/record_keys") class DatasetRecordKeysResource(Resource): - @api.doc("dataset") + @api.doc("record_keys") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index c01120a3..748c8d69 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -16,7 +16,7 @@ @api.route("/study//dataset//metadata/related_item") class DatasetRelatedItemResource(Resource): - @api.doc("dataset") + @api.doc("related_item") 
@api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py index d48860a8..14b27580 100644 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -18,7 +18,7 @@ @api.route("/study//dataset//metadata/related_item_identifier") class DatasetRelatedItemContributorResource(Resource): - @api.doc("dataset") + @api.doc("related_item_identifier") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py index 185fbcb5..e08b88f3 100644 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -19,7 +19,7 @@ @api.route("/study//dataset//metadata/related_item_contributor") class DatasetRelatedItemContributorResource(Resource): - @api.doc("dataset") + @api.doc("related_item_contributor") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py index c055fc26..d2328594 100644 --- a/apis/dataset_metadata/dataset_related_item_other.py +++ b/apis/dataset_metadata/dataset_related_item_other.py @@ -18,7 +18,7 @@ @api.route("/study//dataset//metadata/related_item_other") class DatasetRelatedItemContributorResource(Resource): - @api.doc("dataset") + @api.doc("related_item_other") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_related_item_title.py 
b/apis/dataset_metadata/dataset_related_item_title.py index 7cc2da80..1ceaab46 100644 --- a/apis/dataset_metadata/dataset_related_item_title.py +++ b/apis/dataset_metadata/dataset_related_item_title.py @@ -18,7 +18,7 @@ @api.route("/study//dataset//metadata/related_item_title") class DatasetRelatedItemTitleResource(Resource): - @api.doc("dataset") + @api.doc("related_item_title") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 8e42db42..9ff7dd4b 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -19,7 +19,7 @@ @api.route("/study//dataset//metadata/rights") class DatasetRightsResource(Resource): - @api.doc("dataset") + @api.doc("rights") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 004b8fc9..4c763b7a 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -20,7 +20,7 @@ @api.route("/study//dataset//metadata/subject") class DatasetSubjectResource(Resource): - @api.doc("dataset") + @api.doc("subject") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 3fe94abb..016341c7 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -18,7 +18,7 @@ @api.route("/study//dataset//metadata/title") class DatasetTitleResource(Resource): - @api.doc("dataset") + @api.doc("title") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") diff --git a/apis/study_metadata/study_available_ipd.py 
b/apis/study_metadata/study_available_ipd.py index 7740ab20..f9a60c84 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -18,7 +18,7 @@ @api.route("/study//metadata/available") class StudyAvailableResource(Resource): - @api.doc("list_study") + @api.doc("available") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 46159d84..46ed1129 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -21,7 +21,7 @@ @api.route("/study//metadata/contact") class StudyContactResource(Resource): - @api.doc("list_study") + @api.doc("contact") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 04096b32..b3af6526 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -18,7 +18,7 @@ @api.route("/study//metadata/description") class StudyDescriptionResource(Resource): - @api.doc("list_study") + @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index b0bf4dbb..e9c5b431 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -33,7 +33,7 @@ @api.route("/study//metadata/design") class StudyDesignResource(Resource): - @api.doc("list_design") + @api.doc("design") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 1ea6ea71..7f5c5559 100644 --- 
a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -27,7 +27,7 @@ @api.route("/study//metadata/eligibility") class StudyEligibilityResource(Resource): - @api.doc("list_study") + @api.doc("eligibility") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 2966c44c..47cdc932 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -21,7 +21,7 @@ @api.route("/study//metadata/identification") class StudyIdentificationResource(Resource): - @api.doc("list_study") + @api.doc("identification") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 085d7182..46c23aa3 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -22,7 +22,7 @@ @api.route("/study//metadata/intervention") class StudyInterventionResource(Resource): - @api.doc("list_study") + @api.doc("intervention") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index d0c7a568..f1bd1c71 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -22,7 +22,7 @@ @api.route("/study//metadata/ipdsharing") class StudyIpdsharingResource(Resource): - @api.doc("list_study") + @api.doc("ipdsharing") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 8118ab4e..13c86c4c 100644 --- a/apis/study_metadata/study_link.py 
+++ b/apis/study_metadata/study_link.py @@ -18,7 +18,7 @@ @api.route("/study//metadata/link") class StudyLinkResource(Resource): - @api.doc("list_study") + @api.doc("link") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index f12032aa..7c69af1c 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -23,7 +23,7 @@ @api.route("/study//metadata/location") class StudyLocationResource(Resource): - @api.doc("list_study") + @api.doc("location") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 26cb0dde..5a5b4e9b 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -19,7 +19,7 @@ @api.route("/study//metadata/other") class StudyOtherResource(Resource): - @api.doc("list_study") + @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 88580444..720053bf 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -20,7 +20,7 @@ @api.route("/study//metadata/overall_official") class StudyOverallOfficialResource(Resource): - @api.doc("list_study") + @api.doc("overall_official") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 1f76d789..5907999e 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -21,7 +21,7 @@ @api.route("/study//metadata/reference") 
class StudyReferenceResource(Resource): - @api.doc("list_study") + @api.doc("reference") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 7d3b67d7..3997ee5c 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -25,7 +25,7 @@ @api.route("/study//metadata/sponsors_collaborators") class StudyStatusResource(Resource): - @api.doc("list_study") + @api.doc("sponsors_collaborators") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index b321e310..ea9173bc 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -22,7 +22,7 @@ @api.route("/study//metadata/status") class StudyStatusResource(Resource): - @api.doc("list_study") + @api.doc("status") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") From ccb35cf746287ceb367827eee70042b47ba7fa01 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 28 Aug 2023 09:10:43 -0700 Subject: [PATCH 047/505] fix: restx api x-fields mask removed --- app.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app.py b/app.py index ef518265..f23acddc 100644 --- a/app.py +++ b/app.py @@ -15,8 +15,8 @@ def create_app(): app = Flask(__name__) # `full` if you want to see all the details - app.config.SWAGGER_UI_DOC_EXPANSION = "list" - + app.config["SWAGGER_UI_DOC_EXPANSION"] = "list" + app.config["RESTX_MASK_SWAGGER"] = False # Initialize config app.config.from_pyfile("config.py") # app.register_blueprint(api) From 74a6166be96549c23628c8efd52abad5d7300274 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 28 Aug 2023 16:11:28 
+0000 Subject: [PATCH 048/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_arm.py | 2 +- apis/study_metadata/study_available_ipd.py | 2 +- apis/study_metadata/study_contact.py | 3 ++- apis/study_metadata/study_eligibility.py | 1 - apis/study_metadata/study_identification.py | 2 +- apis/study_metadata/study_intervention.py | 4 +--- apis/study_metadata/study_link.py | 3 ++- apis/study_metadata/study_location.py | 4 +--- apis/study_metadata/study_overall_official.py | 3 ++- apis/study_metadata/study_reference.py | 4 ++-- apis/study_metadata/study_sponsors_collaborators.py | 13 +++++++++---- model/study_metadata/study_arm.py | 1 + model/study_metadata/study_available_ipd.py | 1 + model/study_metadata/study_contact.py | 1 + model/study_metadata/study_description.py | 1 + model/study_metadata/study_design.py | 1 + model/study_metadata/study_eligibility.py | 1 + model/study_metadata/study_identification.py | 1 + model/study_metadata/study_intervention.py | 1 + model/study_metadata/study_ipdsharing.py | 1 + 20 files changed, 31 insertions(+), 19 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index cd89cc76..559daaee 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -33,7 +33,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_arm_ = StudyArm.query.get(i["id"]) study_arm_.update(i) list_of_elements.append(study_arm_.to_dict()) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index f9a60c84..449a9b07 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -33,7 +33,7 @@ def post(self, study_id: int): 
study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_available_ipd_ = StudyAvailableIpd.query.get(i["id"]) study_available_ipd_.update(i) list_of_elements.append(study_available_ipd_.to_dict()) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 46ed1129..1ff774a8 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -36,7 +36,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_contact_ = StudyContact.query.get(i["id"]) study_contact_.update(i) list_of_elements.append(study_contact_.to_dict()) @@ -46,6 +46,7 @@ def post(self, study_id: int): list_of_elements.append(study_contact_.to_dict()) db.session.commit() return list_of_elements + # @api.route("/study//metadata/arm/") # class StudyArmUpdate(Resource): # def put(self, study_id: int, arm_id: int): diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 7f5c5559..40b3e27d 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -3,7 +3,6 @@ from flask import request - from apis.study_metadata_namespace import api diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 47cdc932..5c85f782 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -36,7 +36,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_identification_ = StudyIdentification.query.get(i["id"]) study_identification_.update(i) list_of_elements.append(study_identification_.to_dict()) diff --git a/apis/study_metadata/study_intervention.py 
b/apis/study_metadata/study_intervention.py index 46c23aa3..421108a6 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -3,7 +3,6 @@ from flask import request - from apis.study_metadata_namespace import api @@ -37,7 +36,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_intervention_ = StudyIntervention.query.get(i["id"]) study_intervention_.update(i) list_of_elements.append(study_intervention_.to_dict()) @@ -49,7 +48,6 @@ def post(self, study_id: int): return list_of_elements - # @api.route("/study//metadata/available_ipd/") # class StudyInterventionUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 13c86c4c..b7172703 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -33,7 +33,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_link_ = StudyLink.query.get(i["id"]) study_link_.update(i) list_of_elements.append(study_link_.to_dict()) @@ -43,6 +43,7 @@ def post(self, study_id: int): list_of_elements.append(study_link_.to_dict()) db.session.commit() return list_of_elements + # @api.route("/study//metadata/available_ipd/") # class StudyLinkUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 7c69af1c..e8a7be5f 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -3,7 +3,6 @@ from flask import request - from apis.study_metadata_namespace import api @@ -38,7 +37,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: 
- if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_location_ = StudyLocation.query.get(i["id"]) study_location_.update(i) list_of_elements.append(study_location_.to_dict()) @@ -49,7 +48,6 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/available_ipd/") # class StudyLocationUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 720053bf..cc140b5e 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -35,7 +35,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_overall_official_ = StudyOverallOfficial.query.get(i["id"]) study_overall_official_.update(i) list_of_elements.append(study_overall_official_.to_dict()) @@ -45,6 +45,7 @@ def post(self, study_id: int): list_of_elements.append(study_overall_official_.to_dict()) db.session.commit() return list_of_elements + # @api.route("/study//metadata/available_ipd/") # class StudyOverallOfficialUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 5907999e..5cd8bb7f 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -3,7 +3,6 @@ from flask import request - from apis.study_metadata_namespace import api @@ -36,7 +35,7 @@ def post(self, study_id: int): study_obj = Study.query.get(study_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: study_reference_ = StudyReference.query.get(i["id"]) study_reference_.update(i) list_of_elements.append(study_reference_.to_dict()) @@ -46,6 +45,7 @@ def post(self, study_id: int): 
list_of_elements.append(study_reference_.to_dict()) db.session.commit() return list_of_elements + # @api.route("/study//metadata/available_ipd/") # class StudyReferenceUpdate(Resource): # def put(self, study_id: int, available_ipd_id: int): diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 3997ee5c..3e159174 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -3,7 +3,6 @@ from flask import request - from apis.study_metadata_namespace import api @@ -38,15 +37,21 @@ def get(self, study_id: int): def post(self, study_id: int): data = request.json study_sponsors_collaborators_ = Study.query.get(study_id) - study_sponsors_collaborators_ = StudySponsorsCollaborators.from_data(study_sponsors_collaborators_, data) + study_sponsors_collaborators_ = StudySponsorsCollaborators.from_data( + study_sponsors_collaborators_, data + ) db.session.add(study_sponsors_collaborators_) db.session.commit() return study_sponsors_collaborators_.to_dict() - @api.route("/study//metadata/sponsors_collaborators/") + @api.route( + "/study//metadata/sponsors_collaborators/" + ) class StudySponsorsCollaboratorsUpdate(Resource): def put(self, study_id: int, sponsors_collaborators_id: int): - study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get(sponsors_collaborators_id) + study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( + sponsors_collaborators_id + ) study_sponsors_collaborators_.update(request.json) db.session.commit() return study_sponsors_collaborators_.to_dict() diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 55828962..218e2693 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -11,6 +11,7 @@ class StudyArm(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_arm" id = 
db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 72fca85b..50f03bc1 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -9,6 +9,7 @@ class StudyAvailableIpd(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_available_ipd" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 20d87091..f510d51b 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -8,6 +8,7 @@ class StudyContact(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_contact" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index f61c986b..3efc10fc 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -8,6 +8,7 @@ class StudyDescription(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_description" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index bd7eafb8..612e83e1 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -11,6 +11,7 @@ class StudyDesign(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_design" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index bbefe1b7..485d248d 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -10,6 +10,7 @@ class 
StudyEligibility(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_eligibility" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index f73e2f39..92366834 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -8,6 +8,7 @@ class StudyIdentification(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_identification" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index c45fef11..584a1b65 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -10,6 +10,7 @@ class StudyIntervention(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_intervention" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 83811389..fccd1622 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -10,6 +10,7 @@ class StudyIpdsharing(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + __tablename__ = "study_ipdsharing" id = db.Column(db.CHAR(36), primary_key=True) From 2ac1ac758863df8d45b6f2e9f164833f310142c5 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 28 Aug 2023 15:35:12 -0700 Subject: [PATCH 049/505] fix: dataabase changes --- apis/__init__.py | 2 +- ...ier.py => dataset_alternate_identifier.py} | 28 +++++++++++++------ apis/study_metadata/study_link.py | 3 ++ model/__init__.py | 4 +-- model/dataset.py | 2 +- ...ier.py => dataset_alternate_identifier.py} | 11 +++----- model/study_metadata/study_eligibility.py | 20 ++++++++----- 7 
files changed, 43 insertions(+), 27 deletions(-) rename apis/dataset_metadata/{dataset_identifier.py => dataset_alternate_identifier.py} (54%) rename model/dataset_metadata/{dataset_identifier.py => dataset_alternate_identifier.py} (75%) diff --git a/apis/__init__.py b/apis/__init__.py index 0db0a90f..df047dc1 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -31,7 +31,7 @@ from .dataset_metadata.dataset_consent import api as consent from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_description import api as description -from .dataset_metadata.dataset_identifier import api as identifier +from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_date import api as date from .dataset_metadata.dataset_de_ident_level import api as de_ident_level diff --git a/apis/dataset_metadata/dataset_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py similarity index 54% rename from apis/dataset_metadata/dataset_identifier.py rename to apis/dataset_metadata/dataset_alternate_identifier.py index 2f2780b2..238d34b2 100644 --- a/apis/dataset_metadata/dataset_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,11 +1,11 @@ -from model import Dataset, db, DatasetIdentifier +from model import Dataset, db, DatasetAlternateIdentifier from flask_restx import Resource, fields from flask import request from apis.dataset_metadata_namespace import api dataset_identifier = api.model( - "DatasetIdentifier", + "DatasetAlternateIdentifier", { "id": fields.String(required=True), "identifier": fields.String(required=True), @@ -16,7 +16,7 @@ @api.route("/study//dataset//metadata/identifier") -class DatasetIdentifierResource(Resource): +class DatasetAlternateIdentifierResource(Resource): @api.doc("identifier") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -24,23 
+24,33 @@ class DatasetIdentifierResource(Resource): @api.marshal_with(dataset_identifier) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) - dataset_identifier_ = dataset_.dataset_identifier + dataset_identifier_ = dataset_.dataset_alternate_identifier return [d.to_dict() for d in dataset_identifier_] def post(self, study_id: int, dataset_id: int): data = request.json data_obj = Dataset.query.get(dataset_id) - dataset_identifier_ = DatasetIdentifier.from_data(data_obj, data) - db.session.add(dataset_identifier_) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + dataset_identifier_ = DatasetAlternateIdentifier.query.get(i["id"]) + if dataset_identifier_ == None: + return f"Study link {i['id']} Id is not found", 404 + dataset_identifier_.update(i) + list_of_elements.append(dataset_identifier_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_identifier_ = DatasetAlternateIdentifier.from_data(data_obj, i) + db.session.add(dataset_identifier_) + list_of_elements.append(dataset_identifier_.to_dict()) db.session.commit() - return dataset_identifier_.to_dict() + return list_of_elements @api.route( "/study//dataset//metadata/identifier/" ) - class DatasetIdentifierUpdate(Resource): + class DatasetAlternateIdentifierUpdate(Resource): def put(self, study_id: int, dataset_id: int, identifier_id: int): - dataset_identifier_ = DatasetIdentifier.query.get(identifier_id) + dataset_identifier_ = DatasetAlternateIdentifier.query.get(identifier_id) dataset_identifier_.update(request.json) db.session.commit() return dataset_identifier_.to_dict() diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 13c86c4c..4b6c461e 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -35,12 +35,15 @@ def post(self, study_id: int): for i in data: if 'id' in i and i["id"]: study_link_ = StudyLink.query.get(i["id"]) + if study_link_ == None: + return f"Study link 
{i['id']} Id is not found", 404 study_link_.update(i) list_of_elements.append(study_link_.to_dict()) elif "id" not in i or not i["id"]: study_link_ = StudyLink.from_data(study_obj, i) db.session.add(study_link_) list_of_elements.append(study_link_.to_dict()) + db.session.commit() return list_of_elements # @api.route("/study//metadata/available_ipd/") diff --git a/model/__init__.py b/model/__init__.py index 40b69469..3cb90606 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -19,7 +19,7 @@ from .dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder -from .dataset_metadata.dataset_identifier import DatasetIdentifier +from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_managing_organization import DatasetManagingOrganization from .dataset_metadata.dataset_other import DatasetOther from .dataset_metadata.dataset_readme import DatasetReadme @@ -75,7 +75,7 @@ "DatasetDeIdentLevel", "DatasetContributorAffiliation", "DatasetFunder", - "DatasetIdentifier", + "DatasetAlternateIdentifier", "DatasetManagingOrganization", "DatasetRights", "DatasetReadme", diff --git a/model/dataset.py b/model/dataset.py index cc6c1695..f61fb73b 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -40,7 +40,7 @@ def __init__(self, study): ) dataset_funder = db.relationship("DatasetFunder", back_populates="dataset") - dataset_identifier = db.relationship("DatasetIdentifier", back_populates="dataset") + dataset_alternate_identifier = db.relationship("DatasetAlternateIdentifier", back_populates="dataset") dataset_managing_organization = db.relationship( "DatasetManagingOrganization", back_populates="dataset" ) diff --git a/model/dataset_metadata/dataset_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py similarity index 75% rename from 
model/dataset_metadata/dataset_identifier.py rename to model/dataset_metadata/dataset_alternate_identifier.py index 572cba5f..47aa7068 100644 --- a/model/dataset_metadata/dataset_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -2,35 +2,32 @@ from ..db import db -class DatasetIdentifier(db.Model): +class DatasetAlternateIdentifier(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset - __tablename__ = "dataset_identifier" + __tablename__ = "dataset_alternate_identifier" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) identifier_type = db.Column(db.String, nullable=False) - alternate = db.Column(db.BOOLEAN, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) - dataset = db.relationship("Dataset", back_populates="dataset_identifier") + dataset = db.relationship("Dataset", back_populates="dataset_alternate_identifier") def to_dict(self): return { "id": self.id, "identifier": self.identifier, "identifier_type": self.identifier_type, - "alternate": self.alternate, } @staticmethod def from_data(dataset, data: dict): - dataset_date = DatasetIdentifier(dataset) + dataset_date = DatasetAlternateIdentifier(dataset) dataset_date.update(data) return dataset_date def update(self, data): self.identifier = data["identifier"] self.identifier_type = data["identifier_type"] - self.alternate = data["alternate"] diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index bbefe1b7..b3867c57 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -14,10 +14,12 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) gender = db.Column(db.String, nullable=False) - gender_based = db.Column(db.BOOLEAN, nullable=False) + gender_based = db.Column(db.String, nullable=False) gender_description = db.Column(db.String, nullable=False) - 
minimum_age = db.Column(db.String, nullable=False) - maximum_age = db.Column(db.String, nullable=False) + minimum_age_value = db.Column(db.Integer, nullable=False) + maximum_age_value = db.Column(db.Integer, nullable=False) + minimum_age_unit = db.Column(db.String, nullable=False) + maximum_age_unit = db.Column(db.String, nullable=False) healthy_volunteers = db.Column(db.BOOLEAN, nullable=False) inclusion_criteria = db.Column(ARRAY(String), nullable=False) exclusion_criteria = db.Column(ARRAY(String), nullable=False) @@ -34,8 +36,10 @@ def to_dict(self): "gender": self.gender, "gender_based": self.gender_based, "gender_description": self.gender_description, - "minimum_age": self.minimum_age, - "maximum_age": self.maximum_age, + "minimum_age_unit": self.minimum_age_unit, + "maximum_age_unit": self.maximum_age_unit, + "minimum_age_value": self.minimum_age_value, + "maximum_age_value": self.maximum_age_value, "healthy_volunteers": self.healthy_volunteers, "inclusion_criteria": self.inclusion_criteria, "exclusion_criteria": self.exclusion_criteria, @@ -56,8 +60,10 @@ def update(self, data): self.gender = data["gender"] self.gender_based = data["gender_based"] self.gender_description = data["gender_description"] - self.minimum_age = data["minimum_age"] - self.maximum_age = data["maximum_age"] + self.minimum_age_value = data["minimum_age_value"] + self.minimum_age_unit = data["minimum_age_unit"] + self.maximum_age_unit = data["maximum_age_unit"] + self.maximum_age_value = data["maximum_age_value"] self.healthy_volunteers = data["healthy_volunteers"] self.inclusion_criteria = data["inclusion_criteria"] self.exclusion_criteria = data["exclusion_criteria"] From 030218abe3fee223b2f9ed7cefdc734f88c75aa5 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 28 Aug 2023 22:36:03 +0000 Subject: [PATCH 050/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- apis/dataset_metadata/dataset_alternate_identifier.py | 2 +- model/dataset.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 238d34b2..745bfe94 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -32,7 +32,7 @@ def post(self, study_id: int, dataset_id: int): data_obj = Dataset.query.get(dataset_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: dataset_identifier_ = DatasetAlternateIdentifier.query.get(i["id"]) if dataset_identifier_ == None: return f"Study link {i['id']} Id is not found", 404 diff --git a/model/dataset.py b/model/dataset.py index f61fb73b..79b1bc7e 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -40,7 +40,9 @@ def __init__(self, study): ) dataset_funder = db.relationship("DatasetFunder", back_populates="dataset") - dataset_alternate_identifier = db.relationship("DatasetAlternateIdentifier", back_populates="dataset") + dataset_alternate_identifier = db.relationship( + "DatasetAlternateIdentifier", back_populates="dataset" + ) dataset_managing_organization = db.relationship( "DatasetManagingOrganization", back_populates="dataset" ) From 21175f4eab469fe2851af5aed27b30c898199e1c Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 29 Aug 2023 15:16:22 -0700 Subject: [PATCH 051/505] fix: participants endpoints --- apis/participant.py | 13 ++++++++----- model/dataset.py | 3 +-- model/dataset_version.py | 1 - model/participant.py | 9 ++++----- model/study.py | 1 - 5 files changed, 13 insertions(+), 14 deletions(-) diff --git a/apis/participant.py b/apis/participant.py index 36c17f41..74327f32 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -24,18 +24,20 @@ class AddParticipant(Resource): @api.doc("participants") @api.response(200, "Success") 
@api.response(400, "Validation Error") - @api.param("id", "Adding participants") @api.marshal_with(participant_model) def get(self, study_id: int): participants = Participant.query.all() return [p.to_dict() for p in participants] + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(participant_model) def post(self, study_id: int): study = Study.query.get(study_id) add_participant = Participant.from_data(request.json, study) db.session.add(add_participant) db.session.commit() - return jsonify(add_participant.to_dict()), 201 + return add_participant.to_dict(), 201 @api.route("/study//participants/") @@ -43,14 +45,15 @@ class UpdateParticipant(Resource): @api.doc("participants") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.param("id", "Adding participants") - # @api.marshal_with(participants) + @api.marshal_with(participant_model) def put(self, study_id, participant_id: int): update_participant = Participant.query.get(participant_id) update_participant.update(request.json) db.session.commit() - return jsonify(update_participant.to_dict()) + return update_participant.to_dict() + @api.response(200, "Success") + @api.response(400, "Validation Error") def delete(self, study_id, participant_id: int): delete_participant = Participant.query.get(participant_id) db.session.delete(delete_participant) diff --git a/model/dataset.py b/model/dataset.py index f61fb73b..0ad2c8b9 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -60,8 +60,7 @@ def to_dict(self): return { "id": self.id, - "updated_on": str(datetime.now()), - "created_at": str(datetime.now()), + "created_at": str(self.created_at), # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published else None, } diff --git a/model/dataset_version.py b/model/dataset_version.py index 263030c3..8d20c383 100644 --- a/model/dataset_version.py +++ b/model/dataset_version.py @@ -56,7 +56,6 @@ def 
update(self, data): self.title = data["title"] self.published = data["published"] self.doi = data["doi"] - self.created_at = data["created_at"] self.published_on = data["published_on"] self.participants[:] = data["participants"] self.changelog = data["changelog"] diff --git a/model/participant.py b/model/participant.py index 18cdb3d4..b8947b37 100644 --- a/model/participant.py +++ b/model/participant.py @@ -9,7 +9,7 @@ class Participant(db.Model): def __init__(self, study): self.study = study self.id = str(uuid.uuid4()) - + self.created_at = datetime.now() __tablename__ = "participant" id = db.Column(db.CHAR(36), primary_key=True) first_name = db.Column(db.String, nullable=False) @@ -34,8 +34,8 @@ def to_dict(self): "last_name": self.last_name, "address": self.address, "age": self.age, - "created_at": str(datetime.now()), - "published_on": str(datetime.now()), + "created_at": str(self.created_at), + "updated_on": str(self.updated_on) } @staticmethod @@ -50,5 +50,4 @@ def update(self, data): self.last_name = data["last_name"] self.address = data["address"] self.age = data["age"] - self.created_at = data["created_at"] - self.updated_on = data["updated_on"] + self.updated_on = datetime.now() diff --git a/model/study.py b/model/study.py index 0de969c6..fcbba374 100644 --- a/model/study.py +++ b/model/study.py @@ -76,7 +76,6 @@ def update(self, data): self.title = data["title"] self.image = data["image"] # self.user = model.User.from_data(data["user"]) - self.created_at = data["created_at"] self.updated_on = data["updated_on"] def validate(self): From f1efb880774edfdd898de53c6f2aa940600fdbe3 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 30 Aug 2023 13:35:26 -0700 Subject: [PATCH 052/505] fix: resx api params --- apis/contributor.py | 5 ++++- apis/dataset.py | 2 -- .../dataset_alternate_identifier.py | 1 - apis/dataset_metadata/dataset_consent.py | 17 +++++++++++---- apis/dataset_metadata/dataset_date.py | 1 - .../dataset_de_ident_level.py | 1 - 
apis/dataset_metadata/dataset_description.py | 1 - apis/dataset_metadata/dataset_funder.py | 1 - .../dataset_managing_organization.py | 1 - apis/dataset_metadata/dataset_other.py | 1 - apis/dataset_metadata/dataset_record_keys.py | 21 ++++++++++++++----- apis/invited_contributor.py | 0 apis/study_metadata/study_available_ipd.py | 1 - apis/study_metadata/study_contact.py | 1 - 14 files changed, 33 insertions(+), 21 deletions(-) create mode 100644 apis/invited_contributor.py diff --git a/apis/contributor.py b/apis/contributor.py index 622d53e1..f4e096e9 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -20,8 +20,11 @@ class AddParticipant(Resource): @api.doc("contributor list") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.param("id", "The contributor identifier") @api.marshal_with(contributors_model) def get(self, study_id: int): contributors = StudyContributor.query.all() return [c.to_dict() for c in contributors] + + def post(self, study_id: int): + contributors = StudyContributor.query.all() + diff --git a/apis/dataset.py b/apis/dataset.py index 51131701..d4669505 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -36,7 +36,6 @@ class DatasetList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("add dataset", params={"id": "An ID"}) @api.marshal_with(dataset) # @api.expect(body=dataset) def get(self, study_id): @@ -75,7 +74,6 @@ class Version(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("dataset version") - @api.param("id", "Adding version") @api.marshal_with(dataset_versions_model) def get(self, study_id, dataset_id, version_id): dataset_version = DatasetVersion.query.get(version_id) diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 238d34b2..82b7b1ce 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ 
b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -20,7 +20,6 @@ class DatasetAlternateIdentifierResource(Resource): @api.doc("identifier") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_identifier) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index e09ce8d7..48ac0627 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -25,7 +25,6 @@ class DatasetConsentResource(Resource): @api.doc("consent") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_consent) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) @@ -35,10 +34,20 @@ def get(self, study_id: int, dataset_id: int): def post(self, study_id: int, dataset_id: int): data = request.json data_obj = Dataset.query.get(dataset_id) - dataset_consent_ = DatasetConsent.from_data(data_obj, data) - db.session.add(dataset_consent_) + list_of_elements = [] + for i in data: + if 'id' in i and i["id"]: + dataset_consent_ = DatasetConsent.query.get(i["id"]) + if dataset_consent_ == None: + return f"Study link {i['id']} Id is not found", 404 + dataset_consent_.update(i) + list_of_elements.append(dataset_consent_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_consent_ = DatasetConsent.from_data(data_obj, i) + db.session.add(dataset_consent_) + list_of_elements.append(dataset_consent_.to_dict()) db.session.commit() - return dataset_consent_.to_dict() + return list_of_elements @api.route("/study//dataset//metadata/consent/") class DatasetAccessUpdate(Resource): diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index e434917a..afe8fe10 100644 --- 
a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -21,7 +21,6 @@ class DatasetDateResource(Resource): @api.doc("date") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_date) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 91f04632..8526adcf 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -25,7 +25,6 @@ class DatasetDeIdentLevelResource(Resource): @api.doc("de_ident_level") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(de_ident_level) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 1425cba1..473e0c38 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -21,7 +21,6 @@ class DatasetDescriptionResource(Resource): @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_description) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index d3e44e79..7b1d3e66 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -25,7 +25,6 @@ class DatasetFunderResource(Resource): @api.doc("funder") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_funder) def get(self, 
study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index e3f9cf42..c5ccc766 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -21,7 +21,6 @@ class DatasetManagingOrganizationResource(Resource): @api.doc("managing_organization") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(managing_organization) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 5e370182..ac3542ed 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -24,7 +24,6 @@ class DatasetOtherResource(Resource): @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_other) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index c7c7f645..0c46a8c4 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -1,7 +1,7 @@ from model import Dataset, DatasetRecordKeys, db -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request +from flask_restx import Resource, fields +from flask import request from apis.dataset_metadata_namespace import api dataset_record_keys = api.model( @@ -29,10 +29,21 @@ def get(self, study_id: int, dataset_id: int): def post(self, study_id: int, dataset_id: int): data = request.json data_obj = Dataset.query.get(dataset_id) - dataset_record_keys_ = 
DatasetRecordKeys.from_data(data_obj, data) - db.session.add(dataset_record_keys_) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_record_keys_ = DatasetRecordKeys.query.get(i["id"]) + if dataset_record_keys_ == None: + return f"Study link {i['id']} Id is not found", 404 + dataset_record_keys_.update(i) + list_of_elements.append(dataset_record_keys_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_record_keys_ = DatasetRecordKeys.from_data(data_obj, i) + db.session.add(dataset_record_keys_) + list_of_elements.append(dataset_record_keys_.to_dict()) db.session.commit() - return dataset_record_keys_.to_dict() + return list_of_elements + @api.route( "/study//dataset//metadata/record_keys/" diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py new file mode 100644 index 00000000..e69de29b diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 449a9b07..91adfdf5 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -21,7 +21,6 @@ class StudyAvailableResource(Resource): @api.doc("available") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") @api.marshal_with(study_available) def get(self, study_id: int): study_ = Study.query.get(study_id) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 1ff774a8..6252ba1a 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -24,7 +24,6 @@ class StudyContactResource(Resource): @api.doc("contact") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") @api.marshal_with(study_contact) def get(self, study_id: int): study_ = Study.query.get(study_id) From 99da9f799680ee83c4a042537100d9642129dc54 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 30 Aug 2023 13:58:47 -0700 
Subject: [PATCH 053/505] feat: added study other, description results to GET study --- apis/study.py | 6 ------ model/study.py | 8 +++++--- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/apis/study.py b/apis/study.py index d5647c6d..61803787 100644 --- a/apis/study.py +++ b/apis/study.py @@ -24,12 +24,8 @@ "Study", { "id": fields.String(required=True), - "name": fields.String(required=True), "title": fields.String(required=True), - "description": fields.String(required=True), "image": fields.String(required=True), - "size": fields.String(required=True), - "keywords": fields.String(required=True), "last_updated": fields.String(required=True), "owner": fields.Nested(owner, required=True), }, @@ -41,7 +37,6 @@ class Studies(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.param("id", "The study identifier") # @api.marshal_with(study) def get(self): studies = Study.query.all() @@ -59,7 +54,6 @@ class StudyResource(Resource): @api.doc("get study") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.param("id", "The study identifier") # @api.marshal_with(study) def get(self, study_id: int): study1 = Study.query.get(study_id) diff --git a/model/study.py b/model/study.py index fcbba374..9d00a74e 100644 --- a/model/study.py +++ b/model/study.py @@ -1,6 +1,6 @@ import uuid from datetime import datetime - +from flask import jsonify import model @@ -32,7 +32,7 @@ def __init__(self): study_arm = db.relationship("StudyArm", back_populates="study") study_available_ipd = db.relationship("StudyAvailableIpd", back_populates="study") study_contact = db.relationship("StudyContact", back_populates="study") - study_description = db.relationship("StudyDescription", back_populates="study") + study_description = db.relationship("StudyDescription", uselist=False, back_populates="study") study_design = db.relationship("StudyDesign", back_populates="study") study_eligibility = 
db.relationship("StudyEligibility", back_populates="study") study_identification = db.relationship( @@ -42,7 +42,7 @@ def __init__(self): study_ipdsharing = db.relationship("StudyIpdsharing", back_populates="study") study_link = db.relationship("StudyLink", back_populates="study") study_location = db.relationship("StudyLocation", back_populates="study") - study_other = db.relationship("StudyOther", back_populates="study") + study_other = db.relationship("StudyOther", uselist=False, back_populates="study") study_overall_official = db.relationship( "StudyOverallOfficial", back_populates="study" ) @@ -61,6 +61,8 @@ def to_dict(self): "created_at": str(self.created_at), "updated_on": str(self.updated_on), # "study_contributors": self.study_contributors.to_dict(), + "size": self.study_other.size if self.study_other else None, + "description": self.study_description.brief_summary if self.study_description else None } @staticmethod From 17fdec66927ed29fb5f397f654fc17936862969b Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 30 Aug 2023 20:59:28 +0000 Subject: [PATCH 054/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 1 - apis/dataset_metadata/dataset_consent.py | 2 +- apis/dataset_metadata/dataset_record_keys.py | 1 - model/participant.py | 3 ++- model/study.py | 10 +++++++--- 5 files changed, 10 insertions(+), 7 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index f4e096e9..ed6268ee 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -27,4 +27,3 @@ def get(self, study_id: int): def post(self, study_id: int): contributors = StudyContributor.query.all() - diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 48ac0627..96d9f564 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py 
@@ -36,7 +36,7 @@ def post(self, study_id: int, dataset_id: int): data_obj = Dataset.query.get(dataset_id) list_of_elements = [] for i in data: - if 'id' in i and i["id"]: + if "id" in i and i["id"]: dataset_consent_ = DatasetConsent.query.get(i["id"]) if dataset_consent_ == None: return f"Study link {i['id']} Id is not found", 404 diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 0c46a8c4..af231743 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -44,7 +44,6 @@ def post(self, study_id: int, dataset_id: int): db.session.commit() return list_of_elements - @api.route( "/study//dataset//metadata/record_keys/" ) diff --git a/model/participant.py b/model/participant.py index b8947b37..027f1371 100644 --- a/model/participant.py +++ b/model/participant.py @@ -10,6 +10,7 @@ def __init__(self, study): self.study = study self.id = str(uuid.uuid4()) self.created_at = datetime.now() + __tablename__ = "participant" id = db.Column(db.CHAR(36), primary_key=True) first_name = db.Column(db.String, nullable=False) @@ -35,7 +36,7 @@ def to_dict(self): "address": self.address, "age": self.age, "created_at": str(self.created_at), - "updated_on": str(self.updated_on) + "updated_on": str(self.updated_on), } @staticmethod diff --git a/model/study.py b/model/study.py index 9d00a74e..58d0cc6c 100644 --- a/model/study.py +++ b/model/study.py @@ -32,7 +32,9 @@ def __init__(self): study_arm = db.relationship("StudyArm", back_populates="study") study_available_ipd = db.relationship("StudyAvailableIpd", back_populates="study") study_contact = db.relationship("StudyContact", back_populates="study") - study_description = db.relationship("StudyDescription", uselist=False, back_populates="study") + study_description = db.relationship( + "StudyDescription", uselist=False, back_populates="study" + ) study_design = db.relationship("StudyDesign", back_populates="study") 
study_eligibility = db.relationship("StudyEligibility", back_populates="study") study_identification = db.relationship( @@ -42,7 +44,7 @@ def __init__(self): study_ipdsharing = db.relationship("StudyIpdsharing", back_populates="study") study_link = db.relationship("StudyLink", back_populates="study") study_location = db.relationship("StudyLocation", back_populates="study") - study_other = db.relationship("StudyOther", uselist=False, back_populates="study") + study_other = db.relationship("StudyOther", uselist=False, back_populates="study") study_overall_official = db.relationship( "StudyOverallOfficial", back_populates="study" ) @@ -62,7 +64,9 @@ def to_dict(self): "updated_on": str(self.updated_on), # "study_contributors": self.study_contributors.to_dict(), "size": self.study_other.size if self.study_other else None, - "description": self.study_description.brief_summary if self.study_description else None + "description": self.study_description.brief_summary + if self.study_description + else None, } @staticmethod From d3cfc5d0182bb4f92020020060d91bc6c99a8c3a Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 31 Aug 2023 11:56:31 -0700 Subject: [PATCH 055/505] chore: updated SQL file --- init/all_tables.sql | 226 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 196 insertions(+), 30 deletions(-) diff --git a/init/all_tables.sql b/init/all_tables.sql index 6afa9626..a5113418 100644 --- a/init/all_tables.sql +++ b/init/all_tables.sql @@ -5,30 +5,46 @@ -- HeidiSQL Version: 12.3.0.6589 -- -------------------------------------------------------- -/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; -/*!40101 SET NAMES */; -/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; -/*!40103 SET TIME_ZONE='+00:00' */; -/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; -/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; -/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; - +BEGIN; -- Dumping data 
for table public.dataset: -1 rows /*!40000 ALTER TABLE "dataset" DISABLE KEYS */; INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'), ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'); + ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000001', '2023-08-15 16:53:05.257623', '2023-08-15 16:53:05.257623', '00000000-0000-0000-0000-000000000001'), + ('b210863a-2bee-4eaf-aad8-999b7a7cae06', '2023-08-20 22:31:18.830152', '2023-08-20 22:31:18.830152', '00000000-0000-0000-0000-000000000001'), + ('89aa8ffb-48b5-49c3-92c4-9b90fbcc736f', '2023-08-29 13:46:58.847208', '2023-08-29 13:46:58.847208', '00000000-0000-0000-0000-000000000001'), + ('e6c4cde9-f769-457e-a1ee-2a6c6dd76609', '2023-08-29 13:54:00.410672', '2023-08-29 13:54:00.410672', '00000000-0000-0000-0000-000000000001'), + ('8c510e24-2fb3-4abb-8712-5b4d6c429d15', '2023-08-29 13:54:28.093018', '2023-08-29 13:54:28.093018', '00000000-0000-0000-0000-000000000001'), + ('151e9c0b-20b3-4558-9eed-51830a708899', '2023-08-29 15:02:18.766003', '2023-08-29 15:02:18.766003', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; -- Dumping data for table public.dataset_access: -1 rows /*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; +INSERT INTO "dataset_access" ("id", "type", 
"description", "url", "url_last_checked", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'main', 'Clinical research studies ', 'https://aireadi.org', '1st August', NULL), + ('badac1ab-26fd-4f94-b2b4-b198365a198f', 'none', '', '', '', NULL), + ('6d2c020f-71b1-48d2-8532-89a563868fa4', 'none', '', '', '', NULL), + ('f8f3bf91-2eb9-49b8-a8f0-1c92def99bcf', 'none', '', '', '', NULL), + ('fdc10b6d-2dc6-41c1-b43e-202a24abc80a', 'none', '', '', '', '00000000-0000-0000-0000-000000000001'), + ('395d37d9-e3cf-4989-81f6-21dd2202d1ca', 'none', '', '', '', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; +-- Dumping data for table public.dataset_alternate_identifier: 3 rows +/*!40000 ALTER TABLE "dataset_alternate_identifier" DISABLE KEYS */; +INSERT INTO "dataset_alternate_identifier" ("id", "identifier", "identifier_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', '00000000-0000-0000-0000-000000000001'), + ('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_alternate_identifier" ENABLE KEYS */; + -- Dumping data for table public.dataset_consent: -1 rows /*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */; +INSERT INTO "dataset_consent" ("id", "type", "noncommercial", "geog_restrict", "research_type", "genetic_only", "no_methods", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'), + ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; -- Dumping data for table public.dataset_contributor: -1 rows @@ -43,46 +59,82 @@ INSERT INTO 
"dataset_contributor" ("id", "first_name", "last_name", "name_type", -- Dumping data for table public.dataset_date: -1 rows /*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; +INSERT INTO "dataset_date" ("id", "date", "date_type", "data_information", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000005'), + ('0b1775e5-d110-482f-a1c4-2aa3947b8db8', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'), + ('dc090dbd-6fa3-4b61-829e-2f139bdbd116', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_date" ENABLE KEYS */; -- Dumping data for table public.dataset_description: -1 rows /*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; +INSERT INTO "dataset_description" ("id", "description", "description_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000004'), + ('78f2b774-2f5a-4096-b82e-9923ca04395b', '', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', '', '', 
'00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; -- Dumping data for table public.dataset_de_ident_level: -1 rows /*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; +INSERT INTO "dataset_de_ident_level" ("id", "type", "direct", "hipaa", "dates", "nonarr", "k_anon", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'NA', 'false', 'true', 'false', 'true', 'false', 'none', '00000000-0000-0000-0000-000000000002'), + ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'), + ('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; -- Dumping data for table public.dataset_funder: -1 rows /*!40000 ALTER TABLE "dataset_funder" DISABLE KEYS */; +INSERT INTO "dataset_funder" ("id", "name", "identifier", "identifier_type", "identifier_scheme_uri", "award_number", "award_uri", "award_title", "dataset_id") VALUES + ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; --- Dumping data for table public.dataset_identifier: -1 rows -/*!40000 ALTER TABLE "dataset_identifier" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_identifier" ENABLE KEYS */; - -- Dumping data for table public.dataset_managing_organization: -1 rows /*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; +INSERT INTO "dataset_managing_organization" ("id", "name", "ror_id", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', '00000000-0000-0000-0000-000000000001'), + ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', 
'354grhji5', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; -- Dumping data for table public.dataset_other: -1 rows /*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; +INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "managing_organization_ror_id", "size", "standards_followed", "acknowledgement", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000002'), + ('2fca4640-6f0e-406c-8c7a-e93a0740b9c6', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org', 'NA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping data for table public.dataset_readme: -1 rows /*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping data for table public.dataset_record_keys: -1 rows /*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; +INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES + ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), + 
('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; -- Dumping data for table public.dataset_related_item: -1 rows /*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; +INSERT INTO "dataset_related_item" ("id", "type", "relation_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'main', 'main', '00000000-0000-0000-0000-000000000002'), + ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; -- Dumping data for table public.dataset_related_item_contributor: -1 rows /*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; +INSERT INTO "dataset_related_item_contributor" ("id", "name", "name_type", "creator", "contributor_type", "dataset_related_item_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'string', 'true', 'owner', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; -- Dumping data for table public.dataset_related_item_identifier: -1 rows @@ -99,25 +151,24 @@ INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", -- Dumping data for table public.dataset_rights: -1 rows /*!40000 ALTER TABLE "dataset_rights" DISABLE KEYS */; +INSERT INTO "dataset_rights" ("id", "rights", "uri", "identifier", "identifier_scheme", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'), + ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; -- Dumping data for table public.dataset_subject: -1 rows /*!40000 ALTER TABLE "dataset_subject" DISABLE KEYS */; +INSERT INTO "dataset_subject" ("id", "subject", "scheme", 
"scheme_uri", "value_uri", "classification_code", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'), + ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; -- Dumping data for table public.dataset_title: -1 rows /*!40000 ALTER TABLE "dataset_title" DISABLE KEYS */; +INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES + ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; --- Dumping data for table public.dataset_version: -1 rows -/*!40000 ALTER TABLE "dataset_version" DISABLE KEYS */; -INSERT INTO "dataset_version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'AIREADI1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'AIREADI4', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000003', 'AIREADI3', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); -/*!40000 ALTER TABLE "dataset_version" ENABLE KEYS */; - -- Dumping data for table public.invited_study_contributor: -1 rows /*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; INSERT INTO 
"invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES @@ -129,10 +180,18 @@ INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited -- Dumping data for table public.participant: -1 rows /*!40000 ALTER TABLE "participant" DISABLE KEYS */; INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova', '1221d kibler drive', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000004'); + ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000004'), + ('921ba857-dd08-4149-8f5c-245c6c93ef84', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:23.627034', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('458d2c15-6ed8-4f70-a47d-70b42f2f1b86', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:36.656094', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('35750167-40c5-4f4a-9d8e-ebe89c2efcfc', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:52.555088', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:59.614647', 
'2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('b444520d-0eac-4065-a86d-004481f68d8a', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:45:49.495595', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('88c7592a-4382-4d6b-a197-e880e49db3c0', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:46:17.682171', '2023-08-29 13:46:17.682171', '00000000-0000-0000-0000-000000000001'), + ('ba73ed99-6ec2-46e0-acdb-4a00c31dd572', 'aydan', 'gasimova', '1221d kibler drive', '20', '2023-08-29 15:08:03.758771', '2023-08-29 15:08:03.758771', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-13 16:33:53', '2023-08-29 15:09:04.323914', '00000000-0000-0000-0000-000000000001'), + ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-29 15:15:35.891076', '2023-08-29 15:15:35.891076', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "participant" ENABLE KEYS */; -- Dumping data for table public.study: -1 rows @@ -150,14 +209,33 @@ INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES -- Dumping data for table public.study_arm: -1 rows /*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; +INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Active Comparator', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000003'), + ('75edc7d3-ab7c-404d-a6dd-b55f7fe6446d', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('2b26a772-b4af-4e61-9e76-6642746b78ee', '', '', '', '{""}', '00000000-0000-0000-0000-000000000001'), + ('a82a5e49-a735-4ba3-ab2e-ba64e7fb464c', 'label1', 'type', 'description', 
'{list}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000002'), + ('ba03826c-b9db-4517-aeaa-031793de4a25', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('a11728f0-fadb-4bd0-be09-511d5fb39649', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('311fed5e-fd7a-4a02-8465-3b55a05cab04', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; --- Dumping data for table public.study_available: -1 rows -/*!40000 ALTER TABLE "study_available" DISABLE KEYS */; -/*!40000 ALTER TABLE "study_available" ENABLE KEYS */; +-- Dumping data for table public.study_available_ipd: -1 rows +/*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; +INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'); +/*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; -- Dumping data for table public.study_contact: -1 rows /*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; +INSERT INTO "study_contact" 
("id", "first_name", "last_name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'holly', 'sienna', 'calmi2', 'editor', '4056074345', 'ext', 'holly.sienna@gmail.com', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'), + ('81e71d41-2c93-47cb-9fac-00d94ab1c1a2', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; -- Dumping data for table public.study_contributor: -1 rows @@ -171,54 +249,138 @@ INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES -- Dumping data for table public.study_description: -1 rows /*!40000 ALTER TABLE "study_description" DISABLE KEYS */; +INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000003'), + ('f51a772e-373a-452a-8106-822840a76339', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE 
"study_description" ENABLE KEYS */; -- Dumping data for table public.study_design: -1 rows /*!40000 ALTER TABLE "study_design" DISABLE KEYS */; +INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_interventional_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Randomized', 'type', 'treatment of cancer', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '1 years', 10, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'Randomized', 'type', 'treatment', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{casecontrol}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3 months', 10, '00000000-0000-0000-0000-000000000002'), + ('2b1312ef-338b-454a-9e17-5db84e17d97c', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{[,'',P,a,r,t,i,c,i,p,a,n,t,'',]}', '{Trials}', 1, 'enrollmentInfo', 2, '{[,'',C,a,s,e,C,o,n,t,r,o,l,'',]}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, 
'00000000-0000-0000-0000-000000000001'), + ('ca5500a4-cbce-454a-a767-653461d59397', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{CaseControl}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_design" ENABLE KEYS */; --- Dumping data for table public.study_eligibility: 0 rows +-- Dumping data for table public.study_eligibility: 6 rows /*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; +INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id", "minimum_age_value", "minimum_age_unit", "maximum_age_value", "maximum_age_unit") VALUES + ('00000000-0000-0000-0000-000000000004', 'female', 'Correct', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 30, 'UCSD', 54, 'UW'), + ('dfac0d9e-a104-4f4b-ac1d-05f3699c72f3', 'female', 'Not given', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 23, 'UCSD', 32, 'UW'), + ('00000000-0000-0000-0000-000000000002', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 45, 'UCLA', 43, 'UCLA'), + ('00000000-0000-0000-0000-000000000001', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 24, 'UCSD', 34, 'UCLA'), + ('00000000-0000-0000-0000-000000000003', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 56, 'UCLA', 37, 'UCSD'), + 
('01ac64ef-cfca-47bc-8f30-67525017461f', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 34, 'UW', 29, 'UW'); /*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; -- Dumping data for table public.study_identification: -1 rows /*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; +INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), + ('cfc1b66c-882a-4eee-a6d7-01a7cb018ac2', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; -- Dumping data for table public.study_intervention: -1 rows /*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; +INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'Drug', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm 
other list"}', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'Procedure/Surgery', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'Radiation', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000004'), + ('70eecc49-2c32-47a4-a176-2abb57334fab', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), + ('ede01416-9693-4095-bdae-a2c144a9ec82', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), + ('f6c68d25-8a1c-47ec-9b8d-4db36cf3fecd', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'Device', 'intervention name updatee', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), + ('65ef7ce9-4992-47a1-8a86-355792ca6fbc', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; -- Dumping data for table public.study_ipdsharing: -1 rows /*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; +INSERT INTO "study_ipdsharing" ("id", "ipd_sharing", "ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', 
'00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000003'), + ('ebfe1211-763e-4b10-8e15-7ccb29cb21f5', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; -- Dumping data for table public.study_link: -1 rows /*!40000 ALTER TABLE "study_link" DISABLE KEYS */; +INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000003'), + ('e354922c-9ab3-4b38-ba79-c4d4640737d2', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), + ('040d305e-504d-433b-b5c2-7d56c24d440a', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_link" ENABLE KEYS */; -- Dumping data for table public.study_location: -1 rows /*!40000 ALTER TABLE "study_location" DISABLE KEYS */; +INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'facility', 'active', 'San diego', 'CA', '92121', 
'sAN dIEGO', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000005', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000004'), + ('cda2dc03-95cf-494a-87ea-aac49ac07f0b', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'), + ('72d6a140-e57b-4ba4-a57d-391cdc871c21', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_location" ENABLE KEYS */; -- Dumping data for table public.study_other: -1 rows /*!40000 ALTER TABLE "study_other" DISABLE KEYS */; +INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000003', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'), + ('cd440fa9-988b-4d51-8b66-8c2e42c630b3', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_other" ENABLE KEYS */; -- Dumping data for table public.study_overall_official: -1 rows /*!40000 ALTER TABLE "study_overall_official" DISABLE 
KEYS */; +INSERT INTO "study_overall_official" ("id", "first_name", "last_name", "affiliation", "role", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000003'), + ('a0806089-6602-48b0-b870-1d5e91b956a5', 'firstname', 'lastname', 'affiliation', 'Study Chair', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; --- Dumping data for table public.study_reference: -1 rows +-- Dumping data for table public.study_reference: 6 rows /*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; +INSERT INTO "study_reference" ("id", "identifier", "title", "type", "citation", "study_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'The PubMed Unique Identifier ', ' bibliographic reference', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'The PubMed Unique Identifier ', ' bibliographic reference', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), + ('2996e115-8c44-4914-a470-2764ff280316', 'The PubMed Unique Identifier ', ' bibliographic reference', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'The PubMed Unique Identifier ', ' bibliographic reference', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'The PubMed Unique Identifier ', ' bibliographic reference', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000003'), + 
('00000000-0000-0000-0000-000000000005', 'The PubMed Unique Identifier ', ' bibliographic reference', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000004'); /*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; -- Dumping data for table public.study_sponsors_collaborators: -1 rows /*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; +INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_first_name", "responsible_party_investigator_last_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_first_name", "lead_sponsor_last_name", "collaborator_name", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000005', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('687dea6a-4dbf-45dc-867e-de7b303d4b0c', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; -- Dumping data for table public.study_status: -1 rows /*!40000 ALTER TABLE 
"study_status" DISABLE KEYS */; +INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2021-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), + ('8100ce8e-406d-4483-bc47-634e97c34713', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_status" ENABLE KEYS */; -- Dumping data for table public.user: -1 rows @@ -230,6 +392,8 @@ INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name" ('00000000-0000-0000-0000-000000000004', 'james.lilly@gmail.com', 'james', 'james', 'lilly', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'); /*!40000 ALTER TABLE "user" ENABLE KEYS */; +-- Dumping data for table public.version: -1 rows +/*!40000 ALTER TABLE "version" DISABLE KEYS */; -- Dumping data for table public.version_participants: -1 rows /*!40000 ALTER TABLE "version_participants" DISABLE KEYS 
*/; INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES @@ -239,6 +403,8 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; +COMMIT; + /*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; From 232b5f8e0875074ba40cd187825d977d1cd9c225 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 31 Aug 2023 13:02:38 -0700 Subject: [PATCH 056/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20add=20ini?= =?UTF-8?q?t=20data?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- init/all_tables.sql | 182 ++++++++++++++++++++++---------------------- 1 file changed, 93 insertions(+), 89 deletions(-) diff --git a/init/all_tables.sql b/init/all_tables.sql index a5113418..fd4cfdc5 100644 --- a/init/all_tables.sql +++ b/init/all_tables.sql @@ -7,18 +7,15 @@ BEGIN; -- Dumping data for table public.dataset: -1 rows +-- done /*!40000 ALTER TABLE "dataset" DISABLE KEYS */; INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000001', '2023-08-15 16:53:05.257623', '2023-08-15 16:53:05.257623', '00000000-0000-0000-0000-000000000001'), - 
('b210863a-2bee-4eaf-aad8-999b7a7cae06', '2023-08-20 22:31:18.830152', '2023-08-20 22:31:18.830152', '00000000-0000-0000-0000-000000000001'), - ('89aa8ffb-48b5-49c3-92c4-9b90fbcc736f', '2023-08-29 13:46:58.847208', '2023-08-29 13:46:58.847208', '00000000-0000-0000-0000-000000000001'), - ('e6c4cde9-f769-457e-a1ee-2a6c6dd76609', '2023-08-29 13:54:00.410672', '2023-08-29 13:54:00.410672', '00000000-0000-0000-0000-000000000001'), - ('8c510e24-2fb3-4abb-8712-5b4d6c429d15', '2023-08-29 13:54:28.093018', '2023-08-29 13:54:28.093018', '00000000-0000-0000-0000-000000000001'), - ('151e9c0b-20b3-4558-9eed-51830a708899', '2023-08-29 15:02:18.766003', '2023-08-29 15:02:18.766003', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000006', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; -- Dumping data for table public.dataset_access: -1 rows @@ -170,11 +167,13 @@ INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; -- Dumping data for table public.invited_study_contributor: -1 rows +-- done /*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES - 
('aydan.gasimova@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('bhavesh.patel@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000003'), - ('sanjay.soundarajan@@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000004'); + ('Aliya_Herman@yahoo.com', 'editor', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Anastacio50@hotmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Edward0@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Jailyn17@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; -- Dumping data for table public.participant: -1 rows @@ -195,141 +194,136 @@ INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "c /*!40000 ALTER TABLE "participant" ENABLE KEYS */; -- Dumping data for table public.study: -1 rows +-- done /*!40000 ALTER TABLE "study" DISABLE KEYS */; INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), - ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://loremflickr.com/640/480?lock=342651989655552', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), - ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), - ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-03 12:33:10', '2023-01-03 12:33:11'), - ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://loremflickr.com/640/480?lock=342651989655552', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), - 
('00000000-0000-0000-0000-000000000002', 'study 2', 'https://loremflickr.com/640/480?lock=342651989655552', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), - ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), - ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://loremflickr.com/640/480?lock=342651989655552', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), + ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), + ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), + ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), + ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); + ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), + ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), + ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', '2023-08-03 12:33:10', '2023-01-03 12:33:11'); /*!40000 ALTER TABLE "study" ENABLE KEYS */; -- Dumping data for table public.study_arm: -1 rows +-- done /*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES - 
('00000000-0000-0000-0000-000000000001', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Active Comparator', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000003'), - ('75edc7d3-ab7c-404d-a6dd-b55f7fe6446d', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('2b26a772-b4af-4e61-9e76-6642746b78ee', '', '', '', '{""}', '00000000-0000-0000-0000-000000000001'), - ('a82a5e49-a735-4ba3-ab2e-ba64e7fb464c', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000002'), - ('ba03826c-b9db-4517-aeaa-031793de4a25', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('a11728f0-fadb-4bd0-be09-511d5fb39649', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('311fed5e-fd7a-4a02-8465-3b55a05cab04', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; -- Dumping data for table public.study_available_ipd: -1 rows +-- done /*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', ' for intermediate-size patient populations', 
'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'); + ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'AS2655AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'AS625AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; -- Dumping data for table public.study_contact: -1 rows +-- done /*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; INSERT INTO "study_contact" ("id", "first_name", "last_name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'holly', 'sienna', 'calmi2', 'editor', '4056074345', 'ext', 'holly.sienna@gmail.com', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'), - ('81e71d41-2c93-47cb-9fac-00d94ab1c1a2', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'); + 
('00000000-0000-0000-0000-000000000001', 'Dejah', 'Johnston', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Rolfson', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Verner', 'Nolan', 'Monahan and Sons', '', '501-039-841', NULL, 'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'Lela', 'Cormier', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; -- Dumping data for table public.study_contributor: -1 rows +-- done /*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES - ('editor', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'), ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), - ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000004'), - ('editor', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000006'); + ('viewer', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000002'), + ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000003'), + 
('viewer', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000003'), + ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000003'), + ('owner', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000004'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000005'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000008'); /*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; -- Dumping data for table public.study_description: -1 rows +-- done /*!40000 ALTER TABLE "study_description" DISABLE KEYS */; INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000003'), - ('f51a772e-373a-452a-8106-822840a76339', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'study summary', 'big description', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'study summary', 'big description', 
'00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'study summary', 'big description', '00000000-0000-0000-0000-000000000003'); /*!40000 ALTER TABLE "study_description" ENABLE KEYS */; -- Dumping data for table public.study_design: -1 rows +-- done /*!40000 ALTER TABLE "study_design" DISABLE KEYS */; INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_interventional_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Randomized', 'type', 'treatment of cancer', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '1 years', 10, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'Randomized', 'type', 'treatment', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{casecontrol}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3 months', 10, '00000000-0000-0000-0000-000000000002'), - ('2b1312ef-338b-454a-9e17-5db84e17d97c', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', 
'{[,'',P,a,r,t,i,c,i,p,a,n,t,'',]}', '{Trials}', 1, 'enrollmentInfo', 2, '{[,'',C,a,s,e,C,o,n,t,r,o,l,'',]}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'), - ('ca5500a4-cbce-454a-a767-653461d59397', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{CaseControl}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'Randomized', 'Interventional', 'Treatment', 'description', 'Single Group Assignment', 'Single', 'description', ARRAY ['Participant'], ARRAY ['Phase 1'], 20, 'Actual', 30, NULL, NULL, NULL, NULL, NULL, NULL, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', NULL, 'Observational', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 20, 'Actual', NULL, ARRAY ['Cohort'], ARRAY ['Retrospective'], 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_design" ENABLE KEYS */; -- Dumping data for table public.study_eligibility: 6 rows +-- done /*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id", "minimum_age_value", "minimum_age_unit", "maximum_age_value", "maximum_age_unit") VALUES - ('00000000-0000-0000-0000-000000000004', 'female', 'Correct', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 30, 'UCSD', 54, 'UW'), - ('dfac0d9e-a104-4f4b-ac1d-05f3699c72f3', 'female', 'Not given', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 23, 
'UCSD', 32, 'UW'), - ('00000000-0000-0000-0000-000000000002', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 45, 'UCLA', 43, 'UCLA'), - ('00000000-0000-0000-0000-000000000001', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 24, 'UCSD', 34, 'UCLA'), - ('00000000-0000-0000-0000-000000000003', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 56, 'UCLA', 37, 'UCSD'), - ('01ac64ef-cfca-47bc-8f30-67525017461f', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 34, 'UW', 29, 'UW'); + ('00000000-0000-0000-0000-000000000001', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], NULL, NULL, '00000000-0000-0000-0000-000000000001', 24, 'Years', 34, 'Years'), + ('00000000-0000-0000-0000-000000000002', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], 'Description', 'Probability Sample', '00000000-0000-0000-0000-000000000002', 24, 'Years', 34, 'Years'); /*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; -- Dumping data for table public.study_identification: -1 rows +-- done /*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', 
'00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), - ('cfc1b66c-882a-4eee-a6d7-01a7cb018ac2', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', FALSE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; -- Dumping data for table public.study_intervention: -1 rows +-- done /*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'Drug', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'Procedure/Surgery', 'intervention name', 'Other current and former name', 
'{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'Radiation', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000004'), - ('70eecc49-2c32-47a4-a176-2abb57334fab', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('ede01416-9693-4095-bdae-a2c144a9ec82', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('f6c68d25-8a1c-47ec-9b8d-4db36cf3fecd', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'Device', 'intervention name updatee', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('65ef7ce9-4992-47a1-8a86-355792ca6fbc', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; -- Dumping data for table public.study_ipdsharing: -1 rows +-- done /*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; INSERT INTO "study_ipdsharing" ("id", "ipd_sharing", "ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 
'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000003'), - ('ebfe1211-763e-4b10-8e15-7ccb29cb21f5', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; -- Dumping data for table public.study_link: -1 rows +-- done /*!40000 ALTER TABLE "study_link" DISABLE KEYS */; INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000003'), - ('e354922c-9ab3-4b38-ba79-c4d4640737d2', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), 
- ('040d305e-504d-433b-b5c2-7d56c24d440a', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_link" ENABLE KEYS */; -- Dumping data for table public.study_location: -1 rows +-- done /*!40000 ALTER TABLE "study_location" DISABLE KEYS */; INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000005', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000004'), - ('cda2dc03-95cf-494a-87ea-aac49ac07f0b', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'), - ('72d6a140-e57b-4ba4-a57d-391cdc871c21', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'facility1', 
'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_location" ENABLE KEYS */; -- Dumping data for table public.study_other: -1 rows @@ -384,16 +378,26 @@ INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", /*!40000 ALTER TABLE "study_status" ENABLE KEYS */; -- Dumping data for table public.user: -1 rows +-- done /*!40000 ALTER TABLE "user" DISABLE KEYS */; INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES - ('00000000-0000-0000-0000-000000000001', 'bhavesh.patel@gmail.com', 'bhavesh', 'Bhavesh', 'Patel', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), - ('00000000-0000-0000-0000-000000000002', 'sanjay.soundarajan@gmail.com', 'sanjay', 'sanjay', 'soundarajan', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), - ('00000000-0000-0000-0000-000000000003', 'billy.sanders@gmail.com', 'billy', 'billy', 'sanders', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), - ('00000000-0000-0000-0000-000000000004', 'james.lilly@gmail.com', 'james', 'james', 'lilly', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'); + ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', '2023-08-13 12:34:06', 'Schinner, Kuvalis and Beatty'), + ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', '2023-08-13 12:34:06', 'Schmitt Inc'), + 
('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', '2023-08-13 12:34:06', 'Stracke, Leuschke and Kuvalis'), + ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', '2023-08-13 12:34:06', 'Heidenreich, Wilkinson and Mitchell'); + ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', '2023-08-13 12:34:06', 'Heaney, Russel and Turner'); /*!40000 ALTER TABLE "user" ENABLE KEYS */; -- Dumping data for table public.version: -1 rows +-- done /*!40000 ALTER TABLE "version" DISABLE KEYS */; +INSERT INTO "version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); +/*!40000 ALTER TABLE "version" ENABLE KEYS */; + -- Dumping data for table public.version_participants: -1 rows /*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; INSERT INTO "version_participants" ("dataset_version_id", 
"participant_id") VALUES From ead046f16c07593bbd5d2a5c9411affcc629f143 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 31 Aug 2023 13:32:17 -0700 Subject: [PATCH 057/505] fix: converted datasetVersion to Version --- apis/contributor.py | 2 +- apis/dataset.py | 10 ++++----- apis/invited_contributor.py | 26 ++++++++++++++++++++++++ model/__init__.py | 4 ++-- model/dataset.py | 8 ++++---- model/dataset_versions.py | 4 ++-- model/participant.py | 4 ++-- model/study_metadata/study_reference.py | 2 +- model/{dataset_version.py => version.py} | 8 ++++---- 9 files changed, 47 insertions(+), 21 deletions(-) rename model/{dataset_version.py => version.py} (89%) diff --git a/apis/contributor.py b/apis/contributor.py index ed6268ee..ee1128e2 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -6,7 +6,7 @@ contributors_model = api.model( - "DatasetVersion", + "Version", { "user_id": fields.String(required=True), "permission": fields.String(required=True), diff --git a/apis/dataset.py b/apis/dataset.py index d4669505..e41722f5 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,13 +1,13 @@ from flask import Response, jsonify, request from flask_restx import Namespace, Resource, fields -from model import Dataset, DatasetVersion, Participant, Study, db +from model import Dataset, Version, Participant, Study, db api = Namespace("dataset", description="dataset operations", path="/") dataset_versions_model = api.model( - "DatasetVersion", + "Version", { "id": fields.String(required=True), "title": fields.String(required=True), @@ -76,11 +76,11 @@ class Version(Resource): @api.doc("dataset version") @api.marshal_with(dataset_versions_model) def get(self, study_id, dataset_id, version_id): - dataset_version = DatasetVersion.query.get(version_id) + dataset_version = Version.query.get(version_id) return dataset_version.to_dict() def put(self, study_id, dataset_id, version_id): - data_version_obj = DatasetVersion.query.get(version_id) + data_version_obj = 
Version.query.get(version_id) data_version_obj.update(request.json) db.session.commit() return jsonify(data_version_obj.to_dict()) @@ -103,7 +103,7 @@ def post(self, study_id: int, dataset_id: int): data = request.json data["participants"] = [Participant.query.get(i) for i in data["participants"]] data_obj = Dataset.query.get(dataset_id) - dataset_versions = DatasetVersion.from_data(data_obj, data) + dataset_versions = Version.from_data(data_obj, data) db.session.add(dataset_versions) db.session.commit() return jsonify(dataset_versions.to_dict()) diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index e69de29b..8e81e3ab 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -0,0 +1,26 @@ +from flask_restx import Namespace, Resource, fields + +from model import StudyInvitedContributor, Study + +api = Namespace("contributor", description="contributors", path="/") + + +contributors_model = api.model( + "Version", + { + "user_id": fields.String(required=True), + "permission": fields.String(required=True), + "study_id": fields.String(required=True), + }, +) + + +@api.route("/study//contributor") +class AddParticipant(Resource): + @api.doc("invited contributor") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(contributors_model) + def post(self, study_id: int, invited_contributor_id: int): + invited_contributors = Study.query.get(invited_contributor_id) + diff --git a/model/__init__.py b/model/__init__.py index 3cb90606..fb161460 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,4 +1,4 @@ -from .dataset_version import DatasetVersion +from .version import Version from .dataset_versions import DatasetVersions from .db import db from .participant import Participant @@ -60,7 +60,7 @@ "Study", "Dataset", "DatasetVersions", - "DatasetVersion", + "Version", "Participant", "db", "User", diff --git a/model/dataset.py b/model/dataset.py index 7fce1f08..e99519ae 100644 --- 
a/model/dataset.py +++ b/model/dataset.py @@ -26,7 +26,7 @@ def __init__(self, study): "DatasetContributor", back_populates="dataset" ) dataset_versions = db.relationship( - "DatasetVersion", back_populates="dataset", lazy="dynamic" + "Version", back_populates="dataset", lazy="dynamic" ) dataset_access = db.relationship("DatasetAccess", back_populates="dataset") @@ -69,14 +69,14 @@ def to_dict(self): def last_published(self): return ( - self.dataset_versions.filter(model.DatasetVersion.published == true()) - .order_by(model.DatasetVersion.published_on.desc()) + self.dataset_versions.filter(model.Version.published == true()) + .order_by(model.Version.published_on.desc()) .first() ) def last_modified(self): return self.dataset_versions.order_by( - model.DatasetVersion.updated_on.desc() + model.Version.updated_on.desc() ).first() @staticmethod diff --git a/model/dataset_versions.py b/model/dataset_versions.py index f2405398..6ee5e780 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -4,8 +4,8 @@ class DatasetVersions: def __init__( self, - last_published: model.DatasetVersion, - last_modified: model.DatasetVersion, + last_published: model.Version, + last_modified: model.Version, id: str, ): self.latest_version = last_modified.id diff --git a/model/participant.py b/model/participant.py index 027f1371..f10f799f 100644 --- a/model/participant.py +++ b/model/participant.py @@ -23,9 +23,9 @@ def __init__(self, study): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="participants") dataset_versions = db.relationship( - "DatasetVersion", + "Version", back_populates="participants", - secondary=model.dataset_version.version_participants, + secondary=model.version.version_participants, ) def to_dict(self): diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 8ef08eac..a4b5e36c 100644 --- a/model/study_metadata/study_reference.py +++ 
b/model/study_metadata/study_reference.py @@ -15,7 +15,7 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) title = db.Column(db.String, nullable=False) - type = db.Column(db.BOOLEAN, nullable=False) + type = db.Column(db.String, nullable=False) citation = db.Column(db.String, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) diff --git a/model/dataset_version.py b/model/version.py similarity index 89% rename from model/dataset_version.py rename to model/version.py index 8d20c383..20404d7a 100644 --- a/model/dataset_version.py +++ b/model/version.py @@ -8,18 +8,18 @@ "version_participants", db.Model.metadata, db.Column( - "dataset_version_id", db.ForeignKey("dataset_version.id"), primary_key=True + "dataset_version_id", db.ForeignKey("version.id"), primary_key=True ), db.Column("participant_id", db.ForeignKey("participant.id"), primary_key=True), ) -class DatasetVersion(db.Model): +class Version(db.Model): def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) - __tablename__ = "dataset_version" + __tablename__ = "version" id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) @@ -48,7 +48,7 @@ def to_dict(self): @staticmethod def from_data(dataset: Dataset, data: dict): - dataset_version_obj = DatasetVersion(dataset) + dataset_version_obj = Version(dataset) dataset_version_obj.update(data) return dataset_version_obj From 0b8a74d969fb20ff273b149e2e8695bea8da7220 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 31 Aug 2023 20:32:56 +0000 Subject: [PATCH 058/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/invited_contributor.py | 1 - model/dataset.py | 4 +--- model/version.py | 4 +--- 3 files changed, 2 insertions(+), 7 deletions(-) diff --git 
a/apis/invited_contributor.py b/apis/invited_contributor.py index 8e81e3ab..bd224f2d 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -23,4 +23,3 @@ class AddParticipant(Resource): @api.marshal_with(contributors_model) def post(self, study_id: int, invited_contributor_id: int): invited_contributors = Study.query.get(invited_contributor_id) - diff --git a/model/dataset.py b/model/dataset.py index e99519ae..767c880a 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -75,9 +75,7 @@ def last_published(self): ) def last_modified(self): - return self.dataset_versions.order_by( - model.Version.updated_on.desc() - ).first() + return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() @staticmethod def from_data(study, data: dict): diff --git a/model/version.py b/model/version.py index 20404d7a..54d8fea8 100644 --- a/model/version.py +++ b/model/version.py @@ -7,9 +7,7 @@ version_participants = db.Table( "version_participants", db.Model.metadata, - db.Column( - "dataset_version_id", db.ForeignKey("version.id"), primary_key=True - ), + db.Column("dataset_version_id", db.ForeignKey("version.id"), primary_key=True), db.Column("participant_id", db.ForeignKey("participant.id"), primary_key=True), ) From 14b1c94009609d01f4c4a4f61fafb91060286215 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 31 Aug 2023 14:10:08 -0700 Subject: [PATCH 059/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init=20data?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- init/all_tables.sql | 47 ++++++++++++++++++--------------------------- 1 file changed, 19 insertions(+), 28 deletions(-) diff --git a/init/all_tables.sql b/init/all_tables.sql index fd4cfdc5..f7386abb 100644 --- a/init/all_tables.sql +++ b/init/all_tables.sql @@ -327,54 +327,45 @@ INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip" /*!40000 ALTER TABLE "study_location" ENABLE KEYS 
*/; -- Dumping data for table public.study_other: -1 rows +-- done /*!40000 ALTER TABLE "study_other" DISABLE KEYS */; INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000003', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'), - ('cd440fa9-988b-4d51-8b66-8c2e42c630b3', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', TRUE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '1 GB', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', FALSE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '3 GB', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_other" ENABLE KEYS */; -- Dumping data for table public.study_overall_official: -1 rows +-- done /*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; INSERT INTO "study_overall_official" ("id", "first_name", "last_name", "affiliation", "role", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000003'), - ('a0806089-6602-48b0-b870-1d5e91b956a5', 'firstname', 'lastname', 'affiliation', 'Study Chair', 
'00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Bashirian', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Grady', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Maiya', 'Bartoletti', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; -- Dumping data for table public.study_reference: 6 rows +-- done /*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; -INSERT INTO "study_reference" ("id", "identifier", "title", "type", "citation", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'The PubMed Unique Identifier ', ' bibliographic reference', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'The PubMed Unique Identifier ', ' bibliographic reference', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), - ('2996e115-8c44-4914-a470-2764ff280316', 'The PubMed Unique Identifier ', ' bibliographic reference', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'The PubMed Unique Identifier ', ' bibliographic reference', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'The PubMed Unique Identifier ', ' bibliographic reference', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000005', 'The PubMed Unique Identifier ', ' bibliographic reference', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000004'); +INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 
'Yes', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'PMID1A2234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; -- Dumping data for table public.study_sponsors_collaborators: -1 rows /*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; -INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_first_name", "responsible_party_investigator_last_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_first_name", "lead_sponsor_last_name", "collaborator_name", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000005', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('687dea6a-4dbf-45dc-867e-de7b303d4b0c', 'San Diego', 'firstname', 'lastname', 'title', 'affiliation', 'name', 'lastname', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'); +-- done +INSERT INTO 
"study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_first_name", "responsible_party_investigator_last_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name","collaborator_name", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Principal Investigator', 'Sean', 'West', 'Title 1', 'Wyman Inc', 'Kurtis Daniel', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Principal Investigator', 'Sean', 'East', 'Title 1', 'Medhurst Inc', 'Maiya Bartoletti', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; -- Dumping data for table public.study_status: -1 rows +-- done /*!40000 ALTER TABLE "study_status" DISABLE KEYS */; INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2021-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), - 
('8100ce8e-406d-4483-bc47-634e97c34713', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'Recruiting', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Anticipated', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Suspended', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_status" ENABLE KEYS */; -- Dumping data for table public.user: -1 rows From fcc073ee767364d48745e84e602ce5fe5bcce554 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 31 Aug 2023 14:27:55 -0700 Subject: [PATCH 060/505] fix: database props --- apis/dataset.py | 18 +++++++++++++++ model/study_metadata/study_reference.py | 3 --- .../study_sponsors_collaborators.py | 23 +++++++------------ 3 files changed, 26 insertions(+), 18 deletions(-) diff --git a/apis/dataset.py b/apis/dataset.py index e41722f5..86436b75 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -68,6 +68,24 @@ def put(self, study_id, dataset_id): db.session.commit() return data_obj.to_dict() + def delete(self, study_id, dataset_id): + data = request.json + study_obj = Study.query.get(study_id) + for dataset_ in study_obj.dataset: + db.session.delete(dataset_) + db.session.commit() + db.session.delete(study_obj) + db.session.commit() + return Response(status=204) + + # def delete(self, study_id, dataset_id, version_id): + # data_obj = Dataset.query.get(dataset_id) + # for version in data_obj.dataset_versions: + # db.session.delete(version) + # db.session.commit() + # db.session.delete(data_obj) + # db.session.commit() + # return Response(status=204) @api.route("/study//dataset//version/") class Version(Resource): diff --git a/model/study_metadata/study_reference.py 
b/model/study_metadata/study_reference.py index a4b5e36c..5c8fdf1a 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -14,7 +14,6 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) - title = db.Column(db.String, nullable=False) type = db.Column(db.String, nullable=False) citation = db.Column(db.String, nullable=False) @@ -26,7 +25,6 @@ def to_dict(self): return { "id": self.id, "identifier": self.identifier, - "title": self.title, "type": self.type, "citation": self.citation, } @@ -42,7 +40,6 @@ def from_data(study, data: dict): def update(self, data): """Updates the study from a dictionary""" self.identifier = data["identifier"] - self.title = data["title"] self.type = data["type"] self.citation = data["citation"] diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index f3c5a24e..5234d949 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -15,12 +15,10 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) responsible_party_type = db.Column(db.String, nullable=False) - responsible_party_investigator_first_name = db.Column(db.String, nullable=False) - responsible_party_investigator_last_name = db.Column(db.String, nullable=False) + responsible_party_investigator_name = db.Column(db.String, nullable=False) responsible_party_investigator_title = db.Column(db.String, nullable=False) responsible_party_investigator_affiliation = db.Column(db.String, nullable=False) - lead_sponsor_first_name = db.Column(db.String, nullable=False) - lead_sponsor_last_name = db.Column(db.String, nullable=False) + lead_sponsor_name = db.Column(db.String, nullable=False) collaborator_name = db.Column(ARRAY(String), nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) @@ -31,12 
+29,10 @@ def to_dict(self): return { "id": self.id, "responsible_party_type": self.responsible_party_type, - "responsible_party_investigator_first_name": self.responsible_party_investigator_first_name, - "responsible_party_investigator_last_name": self.responsible_party_investigator_last_name, + "responsible_party_investigator_name": self.responsible_party_investigator_name, "responsible_party_investigator_title": self.responsible_party_investigator_title, "responsible_party_investigator_affiliation": self.responsible_party_investigator_affiliation, - "lead_sponsor_first_name": self.lead_sponsor_first_name, - "lead_sponsor_last_name": self.lead_sponsor_last_name, + "lead_sponsor_name": self.lead_sponsor_name, "collaborator_name": self.collaborator_name, } @@ -51,11 +47,9 @@ def from_data(study, data: dict): def update(self, data): """Updates the study from a dictionary""" self.responsible_party_type = data["responsible_party_type"] - self.responsible_party_investigator_first_name = data[ - "responsible_party_investigator_first_name" - ] - self.responsible_party_investigator_last_name = data[ - "responsible_party_investigator_last_name" + + self.responsible_party_investigator_name = data[ + "responsible_party_investigator_name" ] self.responsible_party_investigator_title = data[ "responsible_party_investigator_title" @@ -63,8 +57,7 @@ def update(self, data): self.responsible_party_investigator_affiliation = data[ "responsible_party_investigator_affiliation" ] - self.lead_sponsor_first_name = data["lead_sponsor_first_name"] - self.lead_sponsor_last_name = data["lead_sponsor_last_name"] + self.lead_sponsor_name = data["lead_sponsor_name"] self.collaborator_name = data["collaborator_name"] def validate(self): From 1eb0e83ae838afc744269ec8c271e2ddf66f3a5e Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 31 Aug 2023 15:48:39 -0700 Subject: [PATCH 061/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init=20data?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 4 + db-docker-compose.yaml | 22 ++ db-docker-compose.yml | 13 - dev-docker-compose.yaml | 23 +- {init => sql}/all_tables.sql | 0 sql/init.sql | 407 ++++++++++++++++++++++++++++++ {init => sql}/specific_tables.sql | 0 7 files changed, 445 insertions(+), 24 deletions(-) create mode 100644 db-docker-compose.yaml delete mode 100644 db-docker-compose.yml rename {init => sql}/all_tables.sql (100%) create mode 100644 sql/init.sql rename {init => sql}/specific_tables.sql (100%) diff --git a/.gitignore b/.gitignore index 5d1cc910..a78d352e 100644 --- a/.gitignore +++ b/.gitignore @@ -33,3 +33,7 @@ coverage # Environment variables .env + +# Database +postgres_data/* +postgres-data/* diff --git a/db-docker-compose.yaml b/db-docker-compose.yaml new file mode 100644 index 00000000..c30c55d5 --- /dev/null +++ b/db-docker-compose.yaml @@ -0,0 +1,22 @@ +version: '3' +services: + postgres: + image: postgres:9.6 + restart: always + environment: + POSTGRES_PASSWORD: postgres + POSTGRES_USER: postgres + POSTGRES_DB: postgres + ports: + - 5432:5432 + volumes: + - ./postgres_data:/var/lib/postgresql/data + - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql + # pgadmin: + # image: dpage/pgadmin4 + # restart: always + # environment: + # PGADMIN_DEFAULT_EMAIL:postgres + # PGADMIN_DEFAULT_PASSWORD:postgres + # ports: + # - 5050:80 \ No newline at end of file diff --git a/db-docker-compose.yml b/db-docker-compose.yml deleted file mode 100644 index 67b5526c..00000000 --- a/db-docker-compose.yml +++ /dev/null @@ -1,13 +0,0 @@ -version: '3' -services: - postgres: - image: postgres:9.6 - restart: always - environment: - POSTGRES_PASSWORD: postgres - POSTGRES_USER: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - volumes: - - ./postgres-data:/var/lib/postgresql/data \ No newline at end of file diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 1e8ea83b..47289383 100644 --- 
a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -16,16 +16,17 @@ services: FLASK_ENV: development FLASK_DEBUG: 1 FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" -# database: -# image: postgres:15.3 -# environment: -# - POSTGRES_USER=POSTGRES_USER -# - POSTGRES_PASSWORD=POSTGRES_PASSWORD -# - POSTGRES_DB=POSTGRES_DB -# ports: -# - 5432:5432 -# restart: always -# volumes: -# - db-data:/var/lib/postgresql/data + database: + image: postgres:15.3 + environment: + - POSTGRES_USER=POSTGRES_USER + - POSTGRES_PASSWORD=POSTGRES_PASSWORD + - POSTGRES_DB=POSTGRES_DB + ports: + - 5432:5432 + restart: always + volumes: + - ./postgres_data:/var/lib/postgresql/data + - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql # volumes: # db-data: diff --git a/init/all_tables.sql b/sql/all_tables.sql similarity index 100% rename from init/all_tables.sql rename to sql/all_tables.sql diff --git a/sql/init.sql b/sql/init.sql new file mode 100644 index 00000000..f7386abb --- /dev/null +++ b/sql/init.sql @@ -0,0 +1,407 @@ +-- -------------------------------------------------------- +-- Host: 7hg.h.filess.io +-- Server version: PostgreSQL 14.4 on x86_64-pc-linux-musl, compiled by gcc (Alpine 11.2.1_git20220219) 11.2.1 20220219, 64-bit +-- Server OS: +-- HeidiSQL Version: 12.3.0.6589 +-- -------------------------------------------------------- + +BEGIN; +-- Dumping data for table public.dataset: -1 rows +-- done +/*!40000 ALTER TABLE "dataset" DISABLE KEYS */; +INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), + 
('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'),
+	('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'),
+	('00000000-0000-0000-0000-000000000006', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003');
+/*!40000 ALTER TABLE "dataset" ENABLE KEYS */;
+
+-- Dumping data for table public.dataset_access: -1 rows
+/*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */;
+INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES
+	('00000000-0000-0000-0000-000000000001', 'main', 'Clinical research studies ', 'https://aireadi.org', '1st August', NULL),
+	('badac1ab-26fd-4f94-b2b4-b198365a198f', 'none', '', '', '', NULL),
+	('6d2c020f-71b1-48d2-8532-89a563868fa4', 'none', '', '', '', NULL),
+	('f8f3bf91-2eb9-49b8-a8f0-1c92def99bcf', 'none', '', '', '', NULL),
+	('fdc10b6d-2dc6-41c1-b43e-202a24abc80a', 'none', '', '', '', '00000000-0000-0000-0000-000000000001'),
+	('395d37d9-e3cf-4989-81f6-21dd2202d1ca', 'none', '', '', '', '00000000-0000-0000-0000-000000000001');
+/*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */;
+
+-- Dumping data for table public.dataset_alternate_identifier: 3 rows
+/*!40000 ALTER TABLE "dataset_alternate_identifier" DISABLE KEYS */;
+INSERT INTO "dataset_alternate_identifier" ("id", "identifier", "identifier_type", "dataset_id") VALUES
+	('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'),
+	('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', '00000000-0000-0000-0000-000000000001'),
+	('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001');
+/*!40000 ALTER TABLE "dataset_alternate_identifier" ENABLE KEYS */;
+
+-- Dumping data for table public.dataset_consent: -1 rows
+/*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */;
+INSERT INTO "dataset_consent" ("id", "type", "noncommercial", "geog_restrict", "research_type", "genetic_only", "no_methods", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'), + ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; + +-- Dumping data for table public.dataset_contributor: -1 rows +/*!40000 ALTER TABLE "dataset_contributor" DISABLE KEYS */; +INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_contributor" ENABLE KEYS */; + +-- Dumping data for table public.dataset_contributor_affiliation: -1 rows +/*!40000 ALTER TABLE "dataset_contributor_affiliation" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_contributor_affiliation" ENABLE KEYS */; + +-- Dumping data for table public.dataset_date: -1 rows +/*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; +INSERT INTO "dataset_date" ("id", "date", "date_type", "data_information", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000005'), + ('0b1775e5-d110-482f-a1c4-2aa3947b8db8', '', 'na', 'none', 
'00000000-0000-0000-0000-000000000001'), + ('dc090dbd-6fa3-4b61-829e-2f139bdbd116', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_date" ENABLE KEYS */; + +-- Dumping data for table public.dataset_description: -1 rows +/*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; +INSERT INTO "dataset_description" ("id", "description", "description_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000004'), + ('78f2b774-2f5a-4096-b82e-9923ca04395b', '', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', '', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; + +-- Dumping data for table public.dataset_de_ident_level: -1 rows +/*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; +INSERT INTO "dataset_de_ident_level" ("id", "type", "direct", "hipaa", "dates", "nonarr", "k_anon", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'NA', 'false', 'true', 'false', 'true', 'false', 'none', '00000000-0000-0000-0000-000000000002'), + ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'), + 
('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; + +-- Dumping data for table public.dataset_funder: -1 rows +/*!40000 ALTER TABLE "dataset_funder" DISABLE KEYS */; +INSERT INTO "dataset_funder" ("id", "name", "identifier", "identifier_type", "identifier_scheme_uri", "award_number", "award_uri", "award_title", "dataset_id") VALUES + ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; + +-- Dumping data for table public.dataset_managing_organization: -1 rows +/*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; +INSERT INTO "dataset_managing_organization" ("id", "name", "ror_id", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', '00000000-0000-0000-0000-000000000001'), + ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', '354grhji5', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; + +-- Dumping data for table public.dataset_other: -1 rows +/*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; +INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "managing_organization_ror_id", "size", "standards_followed", "acknowledgement", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000002'), + ('2fca4640-6f0e-406c-8c7a-e93a0740b9c6', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org', 'NA', '00000000-0000-0000-0000-000000000001'), + 
('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; + +-- Dumping data for table public.dataset_readme: -1 rows +/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; + +-- Dumping data for table public.dataset_record_keys: -1 rows +/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; +INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES + ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), + ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item: -1 rows +/*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; +INSERT INTO "dataset_related_item" ("id", "type", "relation_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'main', 'main', '00000000-0000-0000-0000-000000000002'), + ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_contributor: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; +INSERT INTO "dataset_related_item_contributor" ("id", "name", "name_type", "creator", "contributor_type", 
"dataset_related_item_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'string', 'true', 'owner', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_identifier: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_identifier" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_identifier" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_other: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_other" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_other" ENABLE KEYS */; + +-- Dumping data for table public.dataset_related_item_title: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_title" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_title" ENABLE KEYS */; + +-- Dumping data for table public.dataset_rights: -1 rows +/*!40000 ALTER TABLE "dataset_rights" DISABLE KEYS */; +INSERT INTO "dataset_rights" ("id", "rights", "uri", "identifier", "identifier_scheme", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'), + ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; + +-- Dumping data for table public.dataset_subject: -1 rows +/*!40000 ALTER TABLE "dataset_subject" DISABLE KEYS */; +INSERT INTO "dataset_subject" ("id", "subject", "scheme", "scheme_uri", "value_uri", "classification_code", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'), + ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; + +-- Dumping data for table 
public.dataset_title: -1 rows +/*!40000 ALTER TABLE "dataset_title" DISABLE KEYS */; +INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES + ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; + +-- Dumping data for table public.invited_study_contributor: -1 rows +-- done +/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; +INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES + ('Aliya_Herman@yahoo.com', 'editor', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Anastacio50@hotmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Edward0@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Jailyn17@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; + +-- Dumping data for table public.participant: -1 rows +/*!40000 ALTER TABLE "participant" DISABLE KEYS */; +INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000004'), + ('921ba857-dd08-4149-8f5c-245c6c93ef84', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:23.627034', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + 
('458d2c15-6ed8-4f70-a47d-70b42f2f1b86', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:36.656094', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('35750167-40c5-4f4a-9d8e-ebe89c2efcfc', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:52.555088', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:59.614647', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('b444520d-0eac-4065-a86d-004481f68d8a', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:45:49.495595', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), + ('88c7592a-4382-4d6b-a197-e880e49db3c0', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:46:17.682171', '2023-08-29 13:46:17.682171', '00000000-0000-0000-0000-000000000001'), + ('ba73ed99-6ec2-46e0-acdb-4a00c31dd572', 'aydan', 'gasimova', '1221d kibler drive', '20', '2023-08-29 15:08:03.758771', '2023-08-29 15:08:03.758771', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-13 16:33:53', '2023-08-29 15:09:04.323914', '00000000-0000-0000-0000-000000000001'), + ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-29 15:15:35.891076', '2023-08-29 15:15:35.891076', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "participant" ENABLE KEYS */; + +-- Dumping data for table public.study: -1 rows +-- done +/*!40000 ALTER TABLE "study" DISABLE KEYS */; +INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), + ('00000000-0000-0000-0000-000000000002', 'study 2', 
'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', '2022-08-03 12:33:10', '2023-07-03 12:33:11'),
+ ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', '2016-08-03 12:33:10', '2023-02-03 12:33:11'),
+ ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', '2020-08-03 12:33:10', '2021-09-03 12:33:11'),
+ ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', '2021-08-03 12:33:10', '2023-05-03 12:33:11'),
+ ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', '2019-08-03 12:33:10', '2022-08-03 12:33:11'),
+ ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', '2020-08-03 12:33:10', '2023-03-03 12:33:11'),
+ ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', '2023-08-03 12:33:10', '2023-01-03 12:33:11');
+/*!40000 ALTER TABLE "study" ENABLE KEYS */;
+
+-- Dumping data for table public.study_arm: -1 rows
+-- done
+/*!40000 ALTER TABLE "study_arm" DISABLE KEYS */;
+INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES
+ ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000003', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000002');
+/*!40000 ALTER TABLE "study_arm" ENABLE KEYS */;
+
+-- Dumping data for table public.study_available_ipd: -1 rows
+-- done
+/*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */;
+INSERT INTO 
"study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'AS2655AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'AS625AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; + +-- Dumping data for table public.study_contact: -1 rows +-- done +/*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; +INSERT INTO "study_contact" ("id", "first_name", "last_name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Johnston', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Rolfson', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Verner', 'Nolan', 'Monahan and Sons', '', '501-039-841', NULL, 'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'Lela', 'Cormier', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; + +-- Dumping data for table public.study_contributor: -1 rows +-- done +/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; +INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000002', 
'00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'), + ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('viewer', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000002'), + ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000003'), + ('viewer', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000003'), + ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000003'), + ('owner', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000004'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000005'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000008'); +/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; + +-- Dumping data for table public.study_description: -1 rows +-- done +/*!40000 ALTER TABLE "study_description" DISABLE KEYS */; +INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'study summary', 'big description', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'study summary', 'big description', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'study summary', 'big description', '00000000-0000-0000-0000-000000000003'); +/*!40000 ALTER TABLE "study_description" ENABLE KEYS */; + +-- Dumping data for table 
public.study_design: -1 rows +-- done +/*!40000 ALTER TABLE "study_design" DISABLE KEYS */; +INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_interventional_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Randomized', 'Interventional', 'Treatment', 'description', 'Single Group Assignment', 'Single', 'description', ARRAY ['Participant'], ARRAY ['Phase 1'], 20, 'Actual', 30, NULL, NULL, NULL, NULL, NULL, NULL, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', NULL, 'Observational', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 20, 'Actual', NULL, ARRAY ['Cohort'], ARRAY ['Retrospective'], 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_design" ENABLE KEYS */; + +-- Dumping data for table public.study_eligibility: 6 rows +-- done +/*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; +INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id", "minimum_age_value", "minimum_age_unit", "maximum_age_value", "maximum_age_unit") VALUES + ('00000000-0000-0000-0000-000000000001', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], NULL, NULL, '00000000-0000-0000-0000-000000000001', 24, 'Years', 34, 'Years'), + ('00000000-0000-0000-0000-000000000002', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], 'Description', 'Probability Sample', 
'00000000-0000-0000-0000-000000000002', 24, 'Years', 34, 'Years');
+/*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */;
+
+-- Dumping data for table public.study_identification: -1 rows
+-- done
+/*!40000 ALTER TABLE "study_identification" DISABLE KEYS */;
+INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES
+ ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', FALSE, '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000002');
+/*!40000 ALTER TABLE "study_identification" ENABLE KEYS */;
+
+-- Dumping data for table public.study_intervention: -1 rows
+-- done
+/*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */;
+INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES
+ ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001');
+/*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */;
+
+-- Dumping data for table public.study_ipdsharing: -1 rows
+-- done
+/*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */;
+INSERT INTO "study_ipdsharing" ("id", 
"ipd_sharing", "ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; + +-- Dumping data for table public.study_link: -1 rows +-- done +/*!40000 ALTER TABLE "study_link" DISABLE KEYS */; +INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_link" ENABLE KEYS */; + +-- Dumping data for table public.study_location: -1 rows +-- done +/*!40000 ALTER TABLE "study_location" DISABLE KEYS */; +INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', 
'00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_location" ENABLE KEYS */; + +-- Dumping data for table public.study_other: -1 rows +-- done +/*!40000 ALTER TABLE "study_other" DISABLE KEYS */; +INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', TRUE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '1 GB', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', FALSE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '3 GB', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_other" ENABLE KEYS */; + +-- Dumping data for table public.study_overall_official: -1 rows +-- done +/*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; +INSERT INTO "study_overall_official" ("id", "first_name", "last_name", "affiliation", "role", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Bashirian', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Grady', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Maiya', 'Bartoletti', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; + +-- Dumping data for table public.study_reference: 6 rows +-- done +/*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; +INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 'Yes', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'PMID1A2234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER 
TABLE "study_reference" ENABLE KEYS */; + +-- Dumping data for table public.study_sponsors_collaborators: -1 rows +/*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; +-- done +INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_first_name", "responsible_party_investigator_last_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name","collaborator_name", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Principal Investigator', 'Sean', 'West', 'Title 1', 'Wyman Inc', 'Kurtis Daniel', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Principal Investigator', 'Sean', 'East', 'Title 1', 'Medhurst Inc', 'Maiya Bartoletti', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; + +-- Dumping data for table public.study_status: -1 rows +-- done +/*!40000 ALTER TABLE "study_status" DISABLE KEYS */; +INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Recruiting', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Anticipated', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Suspended', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_status" ENABLE KEYS */; + +-- Dumping data for table public.user: -1 rows +-- done +/*!40000 ALTER TABLE "user" DISABLE KEYS */; +INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES + ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 
'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', '2023-08-13 12:34:06', 'Schinner, Kuvalis and Beatty'),
+ ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', '2023-08-13 12:34:06', 'Schmitt Inc'),
+ ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', '2023-08-13 12:34:06', 'Stracke, Leuschke and Kuvalis'),
+ ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', '2023-08-13 12:34:06', 'Heidenreich, Wilkinson and Mitchell'),
+ ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', '2023-08-13 12:34:06', 'Heaney, Russel and Turner');
+/*!40000 ALTER TABLE "user" ENABLE KEYS */;
+
+-- Dumping data for table public.version: -1 rows
+-- done
+/*!40000 ALTER TABLE "version" DISABLE KEYS */;
+INSERT INTO "version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", "dataset_id") VALUES
+ ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'),
+ ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000002'),
+ ('00000000-0000-0000-0000-000000000004', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 
16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); +/*!40000 ALTER TABLE "version" ENABLE KEYS */; + +-- Dumping data for table public.version_participants: -1 rows +/*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; +INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; + +COMMIT; + +/*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; +/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; +/*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40111 SET SQL_NOTES=IFNULL(@OLD_SQL_NOTES, 1) */; diff --git a/init/specific_tables.sql b/sql/specific_tables.sql similarity index 100% rename from init/specific_tables.sql rename to sql/specific_tables.sql From 1f397778a4668395fad17e53f597a683f700d14a Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 31 Aug 2023 15:59:42 -0700 Subject: [PATCH 062/505] fix: update database props --- sql/all_tables.sql | 873 +++++++++++++++++++++++++++++++++++++-------- 1 file changed, 727 insertions(+), 146 deletions(-) diff --git a/sql/all_tables.sql b/sql/all_tables.sql index f7386abb..1e3a33c4 100644 --- a/sql/all_tables.sql +++ b/sql/all_tables.sql @@ -5,19 +5,45 @@ -- HeidiSQL Version: 12.3.0.6589 -- -------------------------------------------------------- -BEGIN; --- Dumping data for table public.dataset: -1 rows --- done +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET NAMES */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; 
+/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- Dumping structure for table public.dataset +CREATE TABLE IF NOT EXISTS "dataset" ( + "id" CHAR(36) NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset: 4 rows /*!40000 ALTER TABLE "dataset" DISABLE KEYS */; INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'); - ('00000000-0000-0000-0000-000000000006', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'); + ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'), + 
('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; +-- Dumping structure for table public.dataset_access +CREATE TABLE IF NOT EXISTS "dataset_access" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "url" VARCHAR NOT NULL, + "url_last_checked" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_access_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_access: -1 rows /*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES @@ -25,157 +51,422 @@ INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_chec ('badac1ab-26fd-4f94-b2b4-b198365a198f', 'none', '', '', '', NULL), ('6d2c020f-71b1-48d2-8532-89a563868fa4', 'none', '', '', '', NULL), ('f8f3bf91-2eb9-49b8-a8f0-1c92def99bcf', 'none', '', '', '', NULL), - ('fdc10b6d-2dc6-41c1-b43e-202a24abc80a', 'none', '', '', '', '00000000-0000-0000-0000-000000000001'), - ('395d37d9-e3cf-4989-81f6-21dd2202d1ca', 'none', '', '', '', '00000000-0000-0000-0000-000000000001'); + ('395d37d9-e3cf-4989-81f6-21dd2202d1ca', 'none', '', '', '', NULL), + ('fdc10b6d-2dc6-41c1-b43e-202a24abc80a', 'none', '', '', '', NULL); /*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; +-- Dumping structure for table public.dataset_alternate_identifier +CREATE TABLE IF NOT EXISTS "dataset_alternate_identifier" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_identifier_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO 
ACTION +); + -- Dumping data for table public.dataset_alternate_identifier: 3 rows /*!40000 ALTER TABLE "dataset_alternate_identifier" DISABLE KEYS */; INSERT INTO "dataset_alternate_identifier" ("id", "identifier", "identifier_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', '00000000-0000-0000-0000-000000000001'), - ('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', NULL), + ('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', NULL), + ('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', NULL); /*!40000 ALTER TABLE "dataset_alternate_identifier" ENABLE KEYS */; +-- Dumping structure for table public.dataset_consent +CREATE TABLE IF NOT EXISTS "dataset_consent" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "noncommercial" BOOLEAN NOT NULL, + "geog_restrict" BOOLEAN NOT NULL, + "research_type" BOOLEAN NOT NULL, + "genetic_only" BOOLEAN NOT NULL, + "no_methods" BOOLEAN NOT NULL, + "details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_consent_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_consent: -1 rows /*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */; INSERT INTO "dataset_consent" ("id", "type", "noncommercial", "geog_restrict", "research_type", "genetic_only", "no_methods", "details", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'), - ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 
'none', 'true', 'true', 'true', 'false', 'false', 'na', NULL), + ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', NULL); /*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; +-- Dumping structure for table public.dataset_contributor +CREATE TABLE IF NOT EXISTS "dataset_contributor" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "name_type" VARCHAR NOT NULL, + "name_identifier" VARCHAR NOT NULL, + "name_identifier_scheme" VARCHAR NOT NULL, + "name_identifier_scheme_uri" VARCHAR NOT NULL, + "creator" BOOLEAN NOT NULL, + "contributor_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_contributor_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_contributor: -1 rows /*!40000 ALTER TABLE "dataset_contributor" DISABLE KEYS */; INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', NULL); /*!40000 ALTER TABLE "dataset_contributor" ENABLE KEYS */; +-- Dumping structure for table public.dataset_contributor_affiliation +CREATE TABLE IF NOT EXISTS "dataset_contributor_affiliation" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_scheme" VARCHAR NOT NULL, + "identifier_scheme_uri" VARCHAR NOT NULL, + "dataset_contributor_id" VARCHAR NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT 
"dataset_contributor_affiliation_dataset_contributor_id_fkey" FOREIGN KEY ("dataset_contributor_id") REFERENCES "dataset_contributor" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_contributor_affiliation: -1 rows /*!40000 ALTER TABLE "dataset_contributor_affiliation" DISABLE KEYS */; /*!40000 ALTER TABLE "dataset_contributor_affiliation" ENABLE KEYS */; +-- Dumping structure for table public.dataset_date +CREATE TABLE IF NOT EXISTS "dataset_date" ( + "id" CHAR(36) NOT NULL, + "date" VARCHAR NOT NULL, + "date_type" VARCHAR NOT NULL, + "data_information" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_date_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_date: -1 rows /*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; INSERT INTO "dataset_date" ("id", "date", "date_type", "data_information", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000003', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000005'), - ('0b1775e5-d110-482f-a1c4-2aa3947b8db8', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'), - ('dc090dbd-6fa3-4b61-829e-2f139bdbd116', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', NULL), + ('00000000-0000-0000-0000-000000000004', '2023', 'day', 'none', NULL), + ('0b1775e5-d110-482f-a1c4-2aa3947b8db8', '', 'na', 'none', NULL), + ('dc090dbd-6fa3-4b61-829e-2f139bdbd116', '', 'na', 'none', NULL); /*!40000 ALTER TABLE "dataset_date" ENABLE KEYS 
*/; +-- Dumping structure for table public.dataset_description +CREATE TABLE IF NOT EXISTS "dataset_description" ( + "id" CHAR(36) NOT NULL, + "description" VARCHAR NOT NULL, + "description_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_description_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_description: -1 rows /*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; INSERT INTO "dataset_description" ("id", "description", "description_type", "dataset_id") VALUES ('00000000-0000-0000-0000-000000000002', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000003', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000004', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000004'), - ('78f2b774-2f5a-4096-b82e-9923ca04395b', '', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', '', '', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', '', '', NULL), + ('78f2b774-2f5a-4096-b82e-9923ca04395b', '', '', NULL); /*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; +-- Dumping structure for table public.dataset_de_ident_level +CREATE TABLE IF NOT EXISTS "dataset_de_ident_level" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "direct" BOOLEAN NOT NULL, + "hipaa" BOOLEAN NOT NULL, + "dates" BOOLEAN NOT NULL, + "nonarr" BOOLEAN NOT NULL, + "k_anon" BOOLEAN NOT NULL, + "details" VARCHAR NOT NULL, + 
"dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_de_ident_level_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_de_ident_level: -1 rows /*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; INSERT INTO "dataset_de_ident_level" ("id", "type", "direct", "hipaa", "dates", "nonarr", "k_anon", "details", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002', 'NA', 'false', 'true', 'false', 'true', 'false', 'none', '00000000-0000-0000-0000-000000000002'), - ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'), - ('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', NULL), + ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', NULL), + ('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', NULL); /*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; +-- Dumping structure for table public.dataset_funder +CREATE TABLE IF NOT EXISTS "dataset_funder" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "identifier_scheme_uri" VARCHAR NOT NULL, + "award_number" VARCHAR NOT NULL, + "award_uri" VARCHAR NOT NULL, + "award_title" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_funder_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table 
public.dataset_funder: -1 rows /*!40000 ALTER TABLE "dataset_funder" DISABLE KEYS */; INSERT INTO "dataset_funder" ("id", "name", "identifier", "identifier_type", "identifier_scheme_uri", "award_number", "award_uri", "award_title", "dataset_id") VALUES - ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', '00000000-0000-0000-0000-000000000001'); + ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', NULL); /*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; +-- Dumping structure for table public.dataset_managing_organization +CREATE TABLE IF NOT EXISTS "dataset_managing_organization" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "ror_id" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_managing_organization_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_managing_organization: -1 rows /*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; INSERT INTO "dataset_managing_organization" ("id", "name", "ror_id", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', '00000000-0000-0000-0000-000000000001'), - ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', '354grhji5', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', NULL), + ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', '354grhji5', NULL); /*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; +-- Dumping structure for table public.dataset_other +CREATE TABLE IF NOT EXISTS "dataset_other" ( + "id" CHAR(36) NOT NULL, + "language" VARCHAR NOT NULL, + "managing_organization_name" VARCHAR NOT NULL, + "managing_organization_ror_id" VARCHAR NOT NULL, + "size" UNKNOWN NOT NULL, + "standards_followed" VARCHAR NOT NULL, + "acknowledgement" VARCHAR NOT NULL, + 
"dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_other_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_other: -1 rows /*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "managing_organization_ror_id", "size", "standards_followed", "acknowledgement", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000003', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000002'), - ('2fca4640-6f0e-406c-8c7a-e93a0740b9c6', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org', 'NA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', NULL), + ('00000000-0000-0000-0000-000000000002', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', NULL), + ('2fca4640-6f0e-406c-8c7a-e93a0740b9c6', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org', 'NA', NULL); /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; +-- Dumping structure for table public.dataset_readme +CREATE TABLE IF NOT EXISTS "dataset_readme" ( + "id" CHAR(36) NOT NULL, + "content" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO 
ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_readme: -1 rows /*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES - ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', NULL); /*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; +-- Dumping structure for table public.dataset_record_keys +CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( + "id" CHAR(36) NOT NULL, + "key_type" VARCHAR NOT NULL, + "key_details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_record_keys_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_record_keys: -1 rows /*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES - ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), - ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); + ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', NULL), + ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', NULL), + ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', NULL), + ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', NULL); /*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; +-- Dumping structure for table public.dataset_related_item +CREATE TABLE IF NOT EXISTS "dataset_related_item" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "relation_type" VARCHAR NOT NULL, + "dataset_id" 
CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_related_item: -1 rows /*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; INSERT INTO "dataset_related_item" ("id", "type", "relation_type", "dataset_id") VALUES ('00000000-0000-0000-0000-000000000001', 'main', 'main', '00000000-0000-0000-0000-000000000002'), - ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', '00000000-0000-0000-0000-000000000001'); + ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', NULL); /*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; +-- Dumping structure for table public.dataset_related_item_contributor +CREATE TABLE IF NOT EXISTS "dataset_related_item_contributor" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "name_type" VARCHAR NOT NULL, + "creator" BOOLEAN NOT NULL, + "contributor_type" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_contributor_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_related_item_contributor: -1 rows /*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; INSERT INTO "dataset_related_item_contributor" ("id", "name", "name_type", "creator", "contributor_type", "dataset_related_item_id") VALUES ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'string', 'true', 'owner', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; +-- Dumping structure for table public.dataset_related_item_identifier +CREATE TABLE IF NOT EXISTS "dataset_related_item_identifier" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + 
"metadata_scheme" VARCHAR NOT NULL, + "scheme_uri" VARCHAR NOT NULL, + "scheme_type" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_identifier_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_related_item_identifier: -1 rows /*!40000 ALTER TABLE "dataset_related_item_identifier" DISABLE KEYS */; /*!40000 ALTER TABLE "dataset_related_item_identifier" ENABLE KEYS */; +-- Dumping structure for table public.dataset_related_item_other +CREATE TABLE IF NOT EXISTS "dataset_related_item_other" ( + "id" CHAR(36) NOT NULL, + "publication_year" VARCHAR NOT NULL, + "volume" VARCHAR NOT NULL, + "issue" VARCHAR NOT NULL, + "number_value" VARCHAR NOT NULL, + "number_type" VARCHAR NOT NULL, + "first_page" VARCHAR NOT NULL, + "last_page" BOOLEAN NOT NULL, + "publisher" VARCHAR NOT NULL, + "edition" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_other_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_related_item_other: -1 rows /*!40000 ALTER TABLE "dataset_related_item_other" DISABLE KEYS */; /*!40000 ALTER TABLE "dataset_related_item_other" ENABLE KEYS */; +-- Dumping structure for table public.dataset_related_item_title +CREATE TABLE IF NOT EXISTS "dataset_related_item_title" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "title" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_title_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION 
+); + -- Dumping data for table public.dataset_related_item_title: -1 rows /*!40000 ALTER TABLE "dataset_related_item_title" DISABLE KEYS */; /*!40000 ALTER TABLE "dataset_related_item_title" ENABLE KEYS */; +-- Dumping structure for table public.dataset_rights +CREATE TABLE IF NOT EXISTS "dataset_rights" ( + "id" CHAR(36) NOT NULL, + "rights" VARCHAR NOT NULL, + "uri" VARCHAR NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_scheme" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_rights_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_rights: -1 rows /*!40000 ALTER TABLE "dataset_rights" DISABLE KEYS */; INSERT INTO "dataset_rights" ("id", "rights", "uri", "identifier", "identifier_scheme", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'), - ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', NULL), + ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', NULL); /*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; +-- Dumping structure for table public.dataset_subject +CREATE TABLE IF NOT EXISTS "dataset_subject" ( + "id" CHAR(36) NOT NULL, + "subject" VARCHAR NOT NULL, + "scheme" VARCHAR NOT NULL, + "scheme_uri" VARCHAR NOT NULL, + "value_uri" VARCHAR NOT NULL, + "classification_code" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_subject_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_subject: -1 rows /*!40000 ALTER TABLE "dataset_subject" 
DISABLE KEYS */; INSERT INTO "dataset_subject" ("id", "subject", "scheme", "scheme_uri", "value_uri", "classification_code", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'), - ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', NULL), + ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', NULL); /*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; +-- Dumping structure for table public.dataset_title +CREATE TABLE IF NOT EXISTS "dataset_title" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "dataset_id" VARCHAR NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_title_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.dataset_title: -1 rows /*!40000 ALTER TABLE "dataset_title" DISABLE KEYS */; INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES - ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); + ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', NULL); /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; +-- Dumping structure for table public.invited_study_contributor +CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( + "email_address" VARCHAR NOT NULL, + "permission" VARCHAR NOT NULL, + "invited_on" TIMESTAMP NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("email_address", "study_id"), + CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.invited_study_contributor: -1 rows --- done /*!40000 ALTER TABLE 
"invited_study_contributor" DISABLE KEYS */; INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES - ('Aliya_Herman@yahoo.com', 'editor', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Anastacio50@hotmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Edward0@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Jailyn17@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000002'); + ('aydan.gasimova@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('bhavesh.patel@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000003'), + ('sanjay.soundarajan@@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000004'); /*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; +-- Dumping structure for table public.participant +CREATE TABLE IF NOT EXISTS "participant" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "address" VARCHAR NOT NULL, + "age" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "participant_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.participant: -1 rows /*!40000 ALTER TABLE "participant" DISABLE KEYS */; INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES @@ -193,202 +484,494 @@ INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "c ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-29 15:15:35.891076', '2023-08-29 15:15:35.891076', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE 
"participant" ENABLE KEYS */; +-- Dumping structure for table public.study +CREATE TABLE IF NOT EXISTS "study" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "image" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + PRIMARY KEY ("id") +); + -- Dumping data for table public.study: -1 rows --- done /*!40000 ALTER TABLE "study" DISABLE KEYS */; INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), - ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), - ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), - ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), - ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); - ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), - ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), - ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', '2023-08-03 12:33:10', '2023-01-03 12:33:11'); + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), + ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://loremflickr.com/640/480?lock=342651989655552', '2019-08-03 12:33:10', 
'2022-08-03 12:33:11'), + ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), + ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-03 12:33:10', '2023-01-03 12:33:11'), + ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://loremflickr.com/640/480?lock=342651989655552', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), + ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://loremflickr.com/640/480?lock=342651989655552', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), + ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), + ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://loremflickr.com/640/480?lock=342651989655552', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); /*!40000 ALTER TABLE "study" ENABLE KEYS */; +-- Dumping structure for table public.study_arm +CREATE TABLE IF NOT EXISTS "study_arm" ( + "id" CHAR(36) NOT NULL, + "label" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "intervention_list" UNKNOWN NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_arm_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_arm: -1 rows --- done /*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], 
'00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Active Comparator', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000003'), + ('75edc7d3-ab7c-404d-a6dd-b55f7fe6446d', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('2b26a772-b4af-4e61-9e76-6642746b78ee', '', '', '', '{""}', '00000000-0000-0000-0000-000000000001'), + ('a82a5e49-a735-4ba3-ab2e-ba64e7fb464c', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000002'), + ('ba03826c-b9db-4517-aeaa-031793de4a25', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('a11728f0-fadb-4bd0-be09-511d5fb39649', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), + ('311fed5e-fd7a-4a02-8465-3b55a05cab04', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; +-- Dumping structure for table public.study_available_ipd +CREATE TABLE IF NOT EXISTS "study_available_ipd" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "url" VARCHAR NOT NULL, + "comment" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_available_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_available_ipd: -1 rows --- done /*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; INSERT INTO 
"study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'AS2655AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'AS625AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'); /*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; +-- Dumping structure for table public.study_contact +CREATE TABLE IF NOT EXISTS "study_contact" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "affiliation" VARCHAR NOT NULL, + "role" VARCHAR NOT NULL, + "phone" VARCHAR NOT NULL, + "phone_ext" VARCHAR NOT NULL, + "email_address" VARCHAR NOT NULL, + "central_contact" BOOLEAN NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_contact_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_contact: -1 
rows --- done /*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; INSERT INTO "study_contact" ("id", "first_name", "last_name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Johnston', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Rolfson', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Verner', 'Nolan', 'Monahan and Sons', '', '501-039-841', NULL, 'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'Lela', 'Cormier', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000002', 'holly', 'sienna', 'calmi2', 'editor', '4056074345', 'ext', 'holly.sienna@gmail.com', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'), + ('81e71d41-2c93-47cb-9fac-00d94ab1c1a2', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; +-- Dumping structure for table public.study_contributor +CREATE TABLE IF NOT EXISTS "study_contributor" ( + "permission" VARCHAR NOT NULL, + "user_id" CHAR(36) NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("user_id"), + CONSTRAINT "study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_contributor_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "user" 
("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_contributor: -1 rows --- done /*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), - ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'), - ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), - ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), - ('viewer', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), - ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000002'), - ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000003'), - ('viewer', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000003'), - ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000003'), - ('owner', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000004'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000005'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000008'); + ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000004'), + ('editor', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000006'); /*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; +-- 
Dumping structure for table public.study_description +CREATE TABLE IF NOT EXISTS "study_description" ( + "id" CHAR(36) NOT NULL, + "brief_summary" VARCHAR NOT NULL, + "detailed_description" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_description_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_description: -1 rows --- done /*!40000 ALTER TABLE "study_description" DISABLE KEYS */; INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'study summary', 'big description', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'study summary', 'big description', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'study summary', 'big description', '00000000-0000-0000-0000-000000000003'); + ('00000000-0000-0000-0000-000000000001', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000003'), + ('f51a772e-373a-452a-8106-822840a76339', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_description" ENABLE KEYS */; +-- Dumping structure for table public.study_design +CREATE TABLE IF NOT 
EXISTS "study_design" ( + "id" CHAR(36) NOT NULL, + "design_allocation" VARCHAR NOT NULL, + "study_type" VARCHAR NOT NULL, + "design_interventional_model" VARCHAR NOT NULL, + "design_intervention_model_description" VARCHAR NOT NULL, + "design_primary_purpose" VARCHAR NOT NULL, + "design_masking" VARCHAR NOT NULL, + "design_masking_description" VARCHAR NOT NULL, + "design_who_masked_list" UNKNOWN NOT NULL, + "phase_list" UNKNOWN NOT NULL, + "enrollment_count" INTEGER NOT NULL, + "enrollment_type" VARCHAR NOT NULL, + "number_arms" INTEGER NOT NULL, + "design_observational_model_list" UNKNOWN NOT NULL, + "design_time_perspective_list" UNKNOWN NOT NULL, + "bio_spec_retention" VARCHAR NOT NULL, + "bio_spec_description" VARCHAR NOT NULL, + "target_duration" VARCHAR NOT NULL, + "number_groups_cohorts" INTEGER NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_design_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_design: -1 rows --- done /*!40000 ALTER TABLE "study_design" DISABLE KEYS */; INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_interventional_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Randomized', 'Interventional', 'Treatment', 'description', 'Single Group Assignment', 'Single', 'description', ARRAY ['Participant'], ARRAY ['Phase 1'], 20, 'Actual', 30, NULL, NULL, NULL, NULL, NULL, NULL, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', NULL, 'Observational', NULL, NULL, 
NULL, NULL, NULL, NULL, NULL, 20, 'Actual', NULL, ARRAY ['Cohort'], ARRAY ['Retrospective'], 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000002', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Randomized', 'type', 'treatment of cancer', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '1 years', 10, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'Randomized', 'type', 'treatment', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{casecontrol}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3 months', 10, '00000000-0000-0000-0000-000000000002'), + ('2b1312ef-338b-454a-9e17-5db84e17d97c', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{[,'',P,a,r,t,i,c,i,p,a,n,t,'',]}', '{Trials}', 1, 'enrollmentInfo', 2, '{[,'',C,a,s,e,C,o,n,t,r,o,l,'',]}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'), + ('ca5500a4-cbce-454a-a767-653461d59397', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{CaseControl}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_design" ENABLE KEYS */; +-- Dumping structure for 
table public.study_eligibility +CREATE TABLE IF NOT EXISTS "study_eligibility" ( + "id" CHAR(36) NOT NULL, + "gender" VARCHAR NOT NULL, + "gender_based" VARCHAR NOT NULL, + "gender_description" VARCHAR NOT NULL, + "healthy_volunteers" BOOLEAN NOT NULL, + "inclusion_criteria" UNKNOWN NOT NULL, + "exclusion_criteria" UNKNOWN NOT NULL, + "study_population" VARCHAR NOT NULL, + "sampling_method" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + "minimum_age_value" INTEGER NOT NULL, + "minimum_age_unit" VARCHAR NOT NULL, + "maximum_age_value" INTEGER NOT NULL, + "maximum_age_unit" VARCHAR NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_eligibility_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_eligibility: 6 rows --- done /*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id", "minimum_age_value", "minimum_age_unit", "maximum_age_value", "maximum_age_unit") VALUES - ('00000000-0000-0000-0000-000000000001', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], NULL, NULL, '00000000-0000-0000-0000-000000000001', 24, 'Years', 34, 'Years'), - ('00000000-0000-0000-0000-000000000002', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], 'Description', 'Probability Sample', '00000000-0000-0000-0000-000000000002', 24, 'Years', 34, 'Years'); + ('00000000-0000-0000-0000-000000000004', 'female', 'Correct', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 30, 'UCSD', 54, 'UW'), + ('dfac0d9e-a104-4f4b-ac1d-05f3699c72f3', 'female', 'Not given', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', 
'00000000-0000-0000-0000-000000000001', 23, 'UCSD', 32, 'UW'), + ('00000000-0000-0000-0000-000000000002', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 45, 'UCLA', 43, 'UCLA'), + ('00000000-0000-0000-0000-000000000001', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 24, 'UCSD', 34, 'UCLA'), + ('00000000-0000-0000-0000-000000000003', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 56, 'UCLA', 37, 'UCSD'), + ('01ac64ef-cfca-47bc-8f30-67525017461f', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 34, 'UW', 29, 'UW'); /*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; +-- Dumping structure for table public.study_identification +CREATE TABLE IF NOT EXISTS "study_identification" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "identifier_domain" VARCHAR NOT NULL, + "identifier_link" VARCHAR NOT NULL, + "secondary" BOOLEAN NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_identification_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_identification: -1 rows --- done /*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', FALSE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 
'ADF8934ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000002', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), + ('cfc1b66c-882a-4eee-a6d7-01a7cb018ac2', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; +-- Dumping structure for table public.study_intervention +CREATE TABLE IF NOT EXISTS "study_intervention" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "name" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "arm_group_label_list" UNKNOWN NOT NULL, + "other_name_list" UNKNOWN NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_intervention_study_id_fkey" FOREIGN KEY 
("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_intervention: -1 rows --- done /*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000002', 'Drug', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'Procedure/Surgery', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'Radiation', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000004'), + ('70eecc49-2c32-47a4-a176-2abb57334fab', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), + ('ede01416-9693-4095-bdae-a2c144a9ec82', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), + ('f6c68d25-8a1c-47ec-9b8d-4db36cf3fecd', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'Device', 'intervention name updatee', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', 
'00000000-0000-0000-0000-000000000001'), + ('65ef7ce9-4992-47a1-8a86-355792ca6fbc', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; +-- Dumping structure for table public.study_ipdsharing +CREATE TABLE IF NOT EXISTS "study_ipdsharing" ( + "id" CHAR(36) NOT NULL, + "ipd_sharing" VARCHAR NOT NULL, + "ipd_sharing_description" VARCHAR NOT NULL, + "ipd_sharing_info_type_list" UNKNOWN NOT NULL, + "ipd_sharing_time_frame" VARCHAR NOT NULL, + "ipd_sharing_access_criteria" VARCHAR NOT NULL, + "ipd_sharing_url" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_ipdsharing_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_ipdsharing: -1 rows --- done /*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; INSERT INTO "study_ipdsharing" ("id", "ipd_sharing", "ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'), + 
('00000000-0000-0000-0000-000000000003', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000003'), + ('ebfe1211-763e-4b10-8e15-7ccb29cb21f5', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; +-- Dumping structure for table public.study_link +CREATE TABLE IF NOT EXISTS "study_link" ( + "id" CHAR(36) NOT NULL, + "url" VARCHAR NOT NULL, + "title" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_link_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_link: -1 rows --- done /*!40000 ALTER TABLE "study_link" DISABLE KEYS */; INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema', 
'00000000-0000-0000-0000-000000000003'), + ('e354922c-9ab3-4b38-ba79-c4d4640737d2', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), + ('040d305e-504d-433b-b5c2-7d56c24d440a', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_link" ENABLE KEYS */; +-- Dumping structure for table public.study_location +CREATE TABLE IF NOT EXISTS "study_location" ( + "id" CHAR(36) NOT NULL, + "facility" VARCHAR NOT NULL, + "status" VARCHAR NOT NULL, + "city" VARCHAR NOT NULL, + "state" VARCHAR NOT NULL, + "zip" VARCHAR NOT NULL, + "country" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_location_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_location: -1 rows --- done /*!40000 ALTER TABLE "study_location" DISABLE KEYS */; INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 
'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000005', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000004'), + ('cda2dc03-95cf-494a-87ea-aac49ac07f0b', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'), + ('72d6a140-e57b-4ba4-a57d-391cdc871c21', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_location" ENABLE KEYS */; +-- Dumping structure for table public.study_other +CREATE TABLE IF NOT EXISTS "study_other" ( + "id" CHAR(36) NOT NULL, + "oversight_has_dmc" BOOLEAN NOT NULL, + "conditions" UNKNOWN NOT NULL, + "keywords" UNKNOWN NOT NULL, + "size" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_other_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_other: -1 rows --- done /*!40000 ALTER TABLE "study_other" DISABLE KEYS */; INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', TRUE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '1 GB', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', FALSE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '3 GB', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000002', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000003', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'false', 
'{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'), + ('cd440fa9-988b-4d51-8b66-8c2e42c630b3', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_other" ENABLE KEYS */; +-- Dumping structure for table public.study_overall_official +CREATE TABLE IF NOT EXISTS "study_overall_official" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "affiliation" VARCHAR NOT NULL, + "role" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_overall_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_overall_official: -1 rows --- done /*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; INSERT INTO "study_overall_official" ("id", "first_name", "last_name", "affiliation", "role", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Bashirian', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Grady', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Maiya', 'Bartoletti', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000003'), + ('a0806089-6602-48b0-b870-1d5e91b956a5', 'firstname', 'lastname', 'affiliation', 'Study Chair', 
'00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; +-- Dumping structure for table public.study_reference +CREATE TABLE IF NOT EXISTS "study_reference" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "citation" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_reference_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_reference: 6 rows --- done /*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 'Yes', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'PMID1A2234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000002', 'The PubMed Unique Identifier ', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'The PubMed Unique Identifier ', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), + ('2996e115-8c44-4914-a470-2764ff280316', 'The PubMed Unique Identifier ', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'The PubMed Unique Identifier ', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'The PubMed Unique Identifier ', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000005', 'The PubMed Unique Identifier ', 'type', 'A bibliographic reference', 
'00000000-0000-0000-0000-000000000004'); /*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; +-- Dumping structure for table public.study_sponsors_collaborators +CREATE TABLE IF NOT EXISTS "study_sponsors_collaborators" ( + "id" CHAR(36) NOT NULL, + "responsible_party_type" VARCHAR NOT NULL, + "responsible_party_investigator_name" VARCHAR NOT NULL, + "responsible_party_investigator_title" VARCHAR NOT NULL, + "responsible_party_investigator_affiliation" VARCHAR NOT NULL, + "lead_sponsor_name" VARCHAR NOT NULL, + "collaborator_name" UNKNOWN NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_sponsors_collaborators_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_sponsors_collaborators: -1 rows /*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; --- done -INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_first_name", "responsible_party_investigator_last_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name","collaborator_name", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Principal Investigator', 'Sean', 'West', 'Title 1', 'Wyman Inc', 'Kurtis Daniel', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Principal Investigator', 'Sean', 'East', 'Title 1', 'Medhurst Inc', 'Maiya Bartoletti', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name", "collaborator_name", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'San Diego', 'firstname', 'title', 'affiliation', 
'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000005', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), + ('687dea6a-4dbf-45dc-867e-de7b303d4b0c', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; +-- Dumping structure for table public.study_status +CREATE TABLE IF NOT EXISTS "study_status" ( + "id" CHAR(36) NOT NULL, + "overall_status" VARCHAR NOT NULL, + "why_stopped" VARCHAR NOT NULL, + "start_date" TIMESTAMP NOT NULL, + "start_date_type" VARCHAR NOT NULL, + "completion_date" TIMESTAMP NOT NULL, + "completion_date_type" VARCHAR NOT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.study_status: -1 rows --- done /*!40000 ALTER TABLE "study_status" DISABLE KEYS */; INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Recruiting', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Anticipated', '00000000-0000-0000-0000-000000000001'), - 
('00000000-0000-0000-0000-000000000002', 'Suspended', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2021-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), + ('8100ce8e-406d-4483-bc47-634e97c34713', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "study_status" ENABLE KEYS */; +-- Dumping structure for table public.user +CREATE TABLE IF NOT EXISTS "user" ( + "id" CHAR(36) NOT NULL, + "email_address" VARCHAR NOT NULL, + "username" VARCHAR NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "orcid" VARCHAR NOT NULL, + "hash" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "institution" VARCHAR NOT NULL, + PRIMARY KEY ("id") +); + -- Dumping data for table public.user: -1 rows --- done /*!40000 ALTER TABLE "user" DISABLE KEYS */; INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", 
"created_at", "institution") VALUES - ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', '2023-08-13 12:34:06', 'Schinner, Kuvalis and Beatty'), - ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', '2023-08-13 12:34:06', 'Schmitt Inc'), - ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', '2023-08-13 12:34:06', 'Stracke, Leuschke and Kuvalis'), - ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', '2023-08-13 12:34:06', 'Heidenreich, Wilkinson and Mitchell'); - ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', '2023-08-13 12:34:06', 'Heaney, Russel and Turner'); + ('00000000-0000-0000-0000-000000000001', 'bhavesh.patel@gmail.com', 'bhavesh', 'Bhavesh', 'Patel', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000002', 'sanjay.soundarajan@gmail.com', 'sanjay', 'sanjay', 'soundarajan', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000003', 'billy.sanders@gmail.com', 'billy', 'billy', 'sanders', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), + ('00000000-0000-0000-0000-000000000004', 'james.lilly@gmail.com', 'james', 'james', 'lilly', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'); /*!40000 ALTER TABLE "user" ENABLE KEYS */; +-- Dumping structure for table public.version +CREATE TABLE IF NOT EXISTS "version" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "published" BOOLEAN NOT 
NULL, + "changelog" VARCHAR NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + "doi" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "published_on" TIMESTAMP NOT NULL, + "dataset_id" CHAR(36) NULL DEFAULT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_version_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.version: -1 rows --- done /*!40000 ALTER TABLE "version" DISABLE KEYS */; INSERT INTO "version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); + ('00000000-0000-0000-0000-000000000002', 'AIREADI1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'AIREADI4', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000004'), + ('00000000-0000-0000-0000-000000000003', 'AIREADI3', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', 
'2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', NULL); /*!40000 ALTER TABLE "version" ENABLE KEYS */; +-- Dumping structure for table public.version_participants +CREATE TABLE IF NOT EXISTS "version_participants" ( + "dataset_version_id" CHAR(36) NOT NULL, + "participant_id" CHAR(36) NOT NULL, + PRIMARY KEY ("dataset_version_id", "participant_id"), + CONSTRAINT "version_participants_dataset_version_id_fkey" FOREIGN KEY ("dataset_version_id") REFERENCES "version" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "version_participants_participant_id_fkey" FOREIGN KEY ("participant_id") REFERENCES "participant" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + -- Dumping data for table public.version_participants: -1 rows /*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES @@ -398,8 +981,6 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; -COMMIT; - /*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; From 5788937e1e0fcffc681270b45fddeb2d5bc5dff4 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 31 Aug 2023 23:00:21 +0000 Subject: [PATCH 063/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dataset.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apis/dataset.py b/apis/dataset.py index 86436b75..e97d41d7 100644 --- a/apis/dataset.py +++ 
b/apis/dataset.py @@ -87,6 +87,7 @@ def delete(self, study_id, dataset_id): # db.session.commit() # return Response(status=204) + @api.route("/study//dataset//version/") class Version(Resource): @api.response(201, "Success") From 828701f5e0fea3432cbd1657829e2ac48b9d4cfb Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 31 Aug 2023 16:47:37 -0700 Subject: [PATCH 064/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init=20data?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- db-docker-compose.yaml | 12 +- dev-docker-compose.yaml | 14 +- sql/init.sql | 712 ++++++++++++++++++++++++++++++++++++---- 3 files changed, 652 insertions(+), 86 deletions(-) diff --git a/db-docker-compose.yaml b/db-docker-compose.yaml index c30c55d5..49500c44 100644 --- a/db-docker-compose.yaml +++ b/db-docker-compose.yaml @@ -4,19 +4,19 @@ services: image: postgres:9.6 restart: always environment: - POSTGRES_PASSWORD: postgres - POSTGRES_USER: postgres - POSTGRES_DB: postgres + POSTGRES_USER: admin + POSTGRES_PASSWORD: root + POSTGRES_DB: fairhub_local ports: - 5432:5432 volumes: - - ./postgres_data:/var/lib/postgresql/data + # - ./postgres-data:/var/lib/postgresql/data - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql # pgadmin: # image: dpage/pgadmin4 # restart: always # environment: - # PGADMIN_DEFAULT_EMAIL:postgres - # PGADMIN_DEFAULT_PASSWORD:postgres + # PGADMIN_DEFAULT_EMAIL: admin@admin.com + # PGADMIN_DEFAULT_PASSWORD: root # ports: # - 5050:80 \ No newline at end of file diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 47289383..12dac0f2 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -17,16 +17,14 @@ services: FLASK_DEBUG: 1 FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" database: - image: postgres:15.3 + image: postgres:9.6 environment: - - POSTGRES_USER=POSTGRES_USER - - 
POSTGRES_PASSWORD=POSTGRES_PASSWORD - - POSTGRES_DB=POSTGRES_DB + - POSTGRES_USER: admin + - POSTGRES_PASSWORD: root + - POSTGRES_DB: fairhub_local ports: - 5432:5432 restart: always volumes: - - ./postgres_data:/var/lib/postgresql/data - - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql -# volumes: -# db-data: + - ./postgres-data:/var/lib/postgresql/data + # - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql diff --git a/sql/init.sql b/sql/init.sql index f7386abb..52bcb107 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -6,6 +6,634 @@ -- -------------------------------------------------------- BEGIN; + +-- Dumping structure for table public.study +CREATE TABLE IF NOT EXISTS "study" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "image" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + PRIMARY KEY ("id") +); + +-- Dumping structure for table public.user +CREATE TABLE IF NOT EXISTS "user" ( + "id" CHAR(36) NOT NULL, + "email_address" VARCHAR NOT NULL, + "username" VARCHAR NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "orcid" VARCHAR NOT NULL, + "hash" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "institution" VARCHAR NOT NULL, + PRIMARY KEY ("id") +); + +-- Dumping structure for table public.study_contributor +CREATE TABLE IF NOT EXISTS "study_contributor" ( + "permission" VARCHAR NOT NULL, + "user_id" CHAR(36) NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("user_id", "study_id"), + CONSTRAINT "study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_contributor_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.invited_study_contributor +CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( + "email_address" VARCHAR NOT NULL, + "permission" VARCHAR NOT NULL, + 
"invited_on" TIMESTAMP NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("email_address", "study_id"), + CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset +CREATE TABLE IF NOT EXISTS "dataset" ( + "id" CHAR(36) NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_access +CREATE TABLE IF NOT EXISTS "dataset_access" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "url" VARCHAR NOT NULL, + "url_last_checked" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_access_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_alternate_identifier +CREATE TABLE IF NOT EXISTS "dataset_alternate_identifier" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_identifier_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_consent +CREATE TABLE IF NOT EXISTS "dataset_consent" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "noncommercial" BOOLEAN NOT NULL, + "geog_restrict" BOOLEAN NOT NULL, + "research_type" BOOLEAN NOT NULL, + "genetic_only" BOOLEAN NOT NULL, + "no_methods" BOOLEAN NOT NULL, + "details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_consent_dataset_id_fkey" FOREIGN KEY 
("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_contributor +CREATE TABLE IF NOT EXISTS "dataset_contributor" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "name_type" VARCHAR NOT NULL, + "name_identifier" VARCHAR NOT NULL, + "name_identifier_scheme" VARCHAR NOT NULL, + "name_identifier_scheme_uri" VARCHAR NOT NULL, + "creator" BOOLEAN NOT NULL, + "contributor_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_contributor_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_contributor_affiliation +CREATE TABLE IF NOT EXISTS "dataset_contributor_affiliation" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_scheme" VARCHAR NOT NULL, + "identifier_scheme_uri" VARCHAR NOT NULL, + "dataset_contributor_id" VARCHAR NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_contributor_affiliation_dataset_contributor_id_fkey" FOREIGN KEY ("dataset_contributor_id") REFERENCES "dataset_contributor" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_date +CREATE TABLE IF NOT EXISTS "dataset_date" ( + "id" CHAR(36) NOT NULL, + "date" VARCHAR NOT NULL, + "date_type" VARCHAR NOT NULL, + "data_information" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_date_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_description +CREATE TABLE IF NOT EXISTS "dataset_description" ( + "id" CHAR(36) NOT NULL, + "description" VARCHAR NOT NULL, + "description_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT 
"dataset_description_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_de_ident_level +CREATE TABLE IF NOT EXISTS "dataset_de_ident_level" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "direct" BOOLEAN NOT NULL, + "hipaa" BOOLEAN NOT NULL, + "dates" BOOLEAN NOT NULL, + "nonarr" BOOLEAN NOT NULL, + "k_anon" BOOLEAN NOT NULL, + "details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_de_ident_level_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_funder +CREATE TABLE IF NOT EXISTS "dataset_funder" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "identifier_scheme_uri" VARCHAR NOT NULL, + "award_number" VARCHAR NOT NULL, + "award_uri" VARCHAR NOT NULL, + "award_title" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_funder_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_managing_organization +CREATE TABLE IF NOT EXISTS "dataset_managing_organization" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "ror_id" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_managing_organization_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_other +CREATE TABLE IF NOT EXISTS "dataset_other" ( + "id" CHAR(36) NOT NULL, + "language" VARCHAR NOT NULL, + "managing_organization_name" VARCHAR NOT NULL, + "managing_organization_ror_id" VARCHAR NOT NULL, + "size" VARCHAR NOT NULL, + 
"standards_followed" VARCHAR NOT NULL, + "acknowledgement" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_other_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_readme +CREATE TABLE IF NOT EXISTS "dataset_readme" ( + "id" CHAR(36) NOT NULL, + "content" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_record_keys +CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( + "id" CHAR(36) NOT NULL, + "key_type" VARCHAR NOT NULL, + "key_details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_record_keys_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_related_item +CREATE TABLE IF NOT EXISTS "dataset_related_item" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "relation_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_related_item_contributor +CREATE TABLE IF NOT EXISTS "dataset_related_item_contributor" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "name_type" VARCHAR NOT NULL, + "creator" BOOLEAN NOT NULL, + "contributor_type" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_contributor_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION 
ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_related_item_identifier +CREATE TABLE IF NOT EXISTS "dataset_related_item_identifier" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "metadata_scheme" VARCHAR NOT NULL, + "scheme_uri" VARCHAR NOT NULL, + "scheme_type" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_identifier_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_related_item_other +CREATE TABLE IF NOT EXISTS "dataset_related_item_other" ( + "id" CHAR(36) NOT NULL, + "publication_year" VARCHAR NOT NULL, + "volume" VARCHAR NOT NULL, + "issue" VARCHAR NOT NULL, + "number_value" VARCHAR NOT NULL, + "number_type" VARCHAR NOT NULL, + "first_page" VARCHAR NOT NULL, + "last_page" BOOLEAN NOT NULL, + "publisher" VARCHAR NOT NULL, + "edition" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_other_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_related_item_title +CREATE TABLE IF NOT EXISTS "dataset_related_item_title" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "title" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_title_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_rights +CREATE TABLE IF NOT EXISTS "dataset_rights" ( + "id" CHAR(36) NOT NULL, + "rights" VARCHAR NOT NULL, + "uri" VARCHAR NOT NULL, + 
"identifier" VARCHAR NOT NULL, + "identifier_scheme" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_rights_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_subject +CREATE TABLE IF NOT EXISTS "dataset_subject" ( + "id" CHAR(36) NOT NULL, + "subject" VARCHAR NOT NULL, + "scheme" VARCHAR NOT NULL, + "scheme_uri" VARCHAR NOT NULL, + "value_uri" VARCHAR NOT NULL, + "classification_code" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_subject_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.dataset_title +CREATE TABLE IF NOT EXISTS "dataset_title" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "dataset_id" VARCHAR NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_title_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + + +-- Dumping structure for table public.participant +CREATE TABLE IF NOT EXISTS "participant" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "address" VARCHAR NOT NULL, + "age" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "participant_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_arm +CREATE TABLE IF NOT EXISTS "study_arm" ( + "id" CHAR(36) NOT NULL, + "label" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "intervention_list" VARCHAR[] NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT 
"study_arm_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_available_ipd +CREATE TABLE IF NOT EXISTS "study_available_ipd" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "url" VARCHAR NOT NULL, + "comment" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_available_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_contact +CREATE TABLE IF NOT EXISTS "study_contact" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "affiliation" VARCHAR NOT NULL, + "role" VARCHAR, + "phone" VARCHAR NOT NULL, + "phone_ext" VARCHAR NOT NULL, + "email_address" VARCHAR NOT NULL, + "central_contact" BOOLEAN NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_contact_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_description +CREATE TABLE IF NOT EXISTS "study_description" ( + "id" CHAR(36) NOT NULL, + "brief_summary" VARCHAR NOT NULL, + "detailed_description" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_description_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_design +CREATE TABLE IF NOT EXISTS "study_design" ( + "id" CHAR(36) NOT NULL, + "design_allocation" VARCHAR, + "study_type" VARCHAR NOT NULL, + "design_interventional_model" VARCHAR, + "design_intervention_model_description" VARCHAR, + "design_primary_purpose" VARCHAR, + "design_masking" VARCHAR, + "design_masking_description" VARCHAR, + "design_who_masked_list" VARCHAR[], + "phase_list" VARCHAR[], 
+ "enrollment_count" INTEGER NOT NULL, + "enrollment_type" VARCHAR NOT NULL, + "number_arms" INTEGER, + "design_observational_model_list" VARCHAR[], + "design_time_perspective_list" VARCHAR[], + "bio_spec_retention" VARCHAR, + "bio_spec_description" VARCHAR, + "target_duration" VARCHAR, + "number_groups_cohorts" INTEGER, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_design_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_eligibility +CREATE TABLE IF NOT EXISTS "study_eligibility" ( + "id" CHAR(36) NOT NULL, + "gender" VARCHAR NOT NULL, + "gender_based" VARCHAR NOT NULL, + "gender_description" VARCHAR NOT NULL, + "healthy_volunteers" BOOLEAN NOT NULL, + "inclusion_criteria" VARCHAR[] NOT NULL, + "exclusion_criteria" VARCHAR[] NOT NULL, + "study_population" VARCHAR, + "sampling_method" VARCHAR, + "study_id" CHAR(36) NOT NULL, + "minimum_age_value" INTEGER NOT NULL, + "minimum_age_unit" VARCHAR NOT NULL, + "maximum_age_value" INTEGER NOT NULL, + "maximum_age_unit" VARCHAR NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_eligibility_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_identification +CREATE TABLE IF NOT EXISTS "study_identification" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "identifier_domain" VARCHAR NOT NULL, + "identifier_link" VARCHAR NOT NULL, + "secondary" BOOLEAN NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_identification_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_intervention +CREATE TABLE IF NOT EXISTS "study_intervention" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "name" VARCHAR NOT 
NULL, + "description" VARCHAR NOT NULL, + "arm_group_label_list" VARCHAR[] NOT NULL, + "other_name_list" VARCHAR[] NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_intervention_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_ipdsharing +CREATE TABLE IF NOT EXISTS "study_ipdsharing" ( + "id" CHAR(36) NOT NULL, + "ipd_sharing" VARCHAR NOT NULL, + "ipd_sharing_description" VARCHAR NOT NULL, + "ipd_sharing_info_type_list" VARCHAR[] NOT NULL, + "ipd_sharing_time_frame" VARCHAR NOT NULL, + "ipd_sharing_access_criteria" VARCHAR NOT NULL, + "ipd_sharing_url" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_ipdsharing_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_link +CREATE TABLE IF NOT EXISTS "study_link" ( + "id" CHAR(36) NOT NULL, + "url" VARCHAR NOT NULL, + "title" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_link_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_location +CREATE TABLE IF NOT EXISTS "study_location" ( + "id" CHAR(36) NOT NULL, + "facility" VARCHAR NOT NULL, + "status" VARCHAR NOT NULL, + "city" VARCHAR NOT NULL, + "state" VARCHAR NOT NULL, + "zip" VARCHAR NOT NULL, + "country" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_location_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_other +CREATE TABLE IF NOT EXISTS "study_other" ( + "id" CHAR(36) NOT NULL, + "oversight_has_dmc" BOOLEAN NOT NULL, + "conditions" VARCHAR[] NOT NULL, + "keywords" VARCHAR[] NOT 
NULL, + "size" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_other_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_overall_official +CREATE TABLE IF NOT EXISTS "study_overall_official" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "affiliation" VARCHAR NOT NULL, + "role" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_overall_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_reference +CREATE TABLE IF NOT EXISTS "study_reference" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "citation" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_reference_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_sponsors_collaborators +CREATE TABLE IF NOT EXISTS "study_sponsors_collaborators" ( + "id" CHAR(36) NOT NULL, + "responsible_party_type" VARCHAR NOT NULL, + "responsible_party_investigator_name" VARCHAR NOT NULL, + "responsible_party_investigator_title" VARCHAR NOT NULL, + "responsible_party_investigator_affiliation" VARCHAR NOT NULL, + "lead_sponsor_name" VARCHAR NOT NULL, + "collaborator_name" VARCHAR[] NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_sponsors_collaborators_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.study_status +CREATE TABLE IF NOT EXISTS "study_status" ( + "id" CHAR(36) NOT NULL, + "overall_status" VARCHAR NOT NULL, + "why_stopped" VARCHAR NOT NULL, + "start_date" TIMESTAMP NOT 
NULL, + "start_date_type" VARCHAR NOT NULL, + "completion_date" TIMESTAMP NOT NULL, + "completion_date_type" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.version +CREATE TABLE IF NOT EXISTS "version" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "published" BOOLEAN NOT NULL, + "changelog" VARCHAR NOT NULL, + "updated_on" TIMESTAMP NOT NULL, + "doi" VARCHAR NOT NULL, + "created_at" TIMESTAMP NOT NULL, + "published_on" TIMESTAMP NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_version_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping structure for table public.version_participants +CREATE TABLE IF NOT EXISTS "version_participants" ( + "dataset_version_id" CHAR(36) NOT NULL, + "participant_id" CHAR(36) NOT NULL, + PRIMARY KEY ("dataset_version_id", "participant_id"), + CONSTRAINT "version_participants_dataset_version_id_fkey" FOREIGN KEY ("dataset_version_id") REFERENCES "version" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "version_participants_participant_id_fkey" FOREIGN KEY ("participant_id") REFERENCES "participant" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study: -1 rows +-- done +/*!40000 ALTER TABLE "study" DISABLE KEYS */; +INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), + ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), + ('00000000-0000-0000-0000-000000000003', 'study 3', 
'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), + ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), + ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', '2021-08-03 12:33:10', '2023-05-03 12:33:11'), + ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), + ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), + ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', '2023-08-03 12:33:10', '2023-01-03 12:33:11'); +/*!40000 ALTER TABLE "study" ENABLE KEYS */; + +-- Dumping data for table public.user: -1 rows +-- done +/*!40000 ALTER TABLE "user" DISABLE KEYS */; +INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES + ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', '2023-08-13 12:34:06', 'Schinner, Kuvalis and Beatty'), + ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', '2023-08-13 12:34:06', 'Schmitt Inc'), + ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', '2023-08-13 12:34:06', 'Stracke, Leuschke and Kuvalis'), + ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', '2023-08-13 12:34:06', 'Heidenreich, Wilkinson and 
Mitchell'), + ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', '2023-08-13 12:34:06', 'Heaney, Russel and Turner'); +/*!40000 ALTER TABLE "user" ENABLE KEYS */; + +-- Dumping data for table public.study_contributor: -1 rows +-- done +/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; +INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'), + ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('viewer', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000002'), + ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000003'), + ('viewer', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000003'), + ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000003'), + ('owner', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000004'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000005'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000008'); +/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; + +-- Dumping data for table 
public.invited_study_contributor: -1 rows +-- done +/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; +INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES + ('Aliya_Herman@yahoo.com', 'editor', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Anastacio50@hotmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Edward0@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), + ('Jailyn17@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; + -- Dumping data for table public.dataset: -1 rows -- done /*!40000 ALTER TABLE "dataset" DISABLE KEYS */; @@ -14,19 +642,15 @@ INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000006', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; -- Dumping data for table public.dataset_access: -1 rows /*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'main', 'Clinical 
research studies ', 'https://aireadi.org', '1st August', NULL), - ('badac1ab-26fd-4f94-b2b4-b198365a198f', 'none', '', '', '', NULL), - ('6d2c020f-71b1-48d2-8532-89a563868fa4', 'none', '', '', '', NULL), - ('f8f3bf91-2eb9-49b8-a8f0-1c92def99bcf', 'none', '', '', '', NULL), - ('fdc10b6d-2dc6-41c1-b43e-202a24abc80a', 'none', '', '', '', '00000000-0000-0000-0000-000000000001'), - ('395d37d9-e3cf-4989-81f6-21dd2202d1ca', 'none', '', '', '', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; -- Dumping data for table public.dataset_alternate_identifier: 3 rows @@ -166,16 +790,6 @@ INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; --- Dumping data for table public.invited_study_contributor: -1 rows --- done -/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; -INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES - ('Aliya_Herman@yahoo.com', 'editor', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Anastacio50@hotmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Edward0@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Jailyn17@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; - -- Dumping data for table public.participant: -1 rows /*!40000 ALTER TABLE "participant" DISABLE KEYS */; INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", 
"created_at", "updated_on", "study_id") VALUES @@ -193,20 +807,6 @@ INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "c ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-29 15:15:35.891076', '2023-08-29 15:15:35.891076', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "participant" ENABLE KEYS */; --- Dumping data for table public.study: -1 rows --- done -/*!40000 ALTER TABLE "study" DISABLE KEYS */; -INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), - ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), - ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), - ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), - ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); - ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), - ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), - ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', '2023-08-03 12:33:10', '2023-01-03 12:33:11'); -/*!40000 ALTER TABLE "study" ENABLE KEYS */; - -- Dumping data for table public.study_arm: -1 rows -- done /*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; @@ -231,31 +831,10 @@ INSERT 
INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", INSERT INTO "study_contact" ("id", "first_name", "last_name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Johnston', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Rolfson', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Verner', 'Nolan', 'Monahan and Sons', '', '501-039-841', NULL, 'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'Verner', 'Nolan', 'Monahan and Sons', NULL, '501-039-841', '', 'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000004', 'Lela', 'Cormier', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; --- Dumping data for table public.study_contributor: -1 rows --- done -/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; -INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), - ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'), - ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), - ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'), - ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), - ('viewer', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), - ('viewer', '00000000-0000-0000-0000-000000000004', 
'00000000-0000-0000-0000-000000000002'), - ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000003'), - ('viewer', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000003'), - ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000003'), - ('owner', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000004'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000005'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000008'); -/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; - -- Dumping data for table public.study_description: -1 rows -- done /*!40000 ALTER TABLE "study_description" DISABLE KEYS */; @@ -285,10 +864,10 @@ INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_descrip -- done /*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', FALSE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', "", 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, 
'00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', FALSE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; -- Dumping data for table public.study_intervention: -1 rows @@ -355,9 +934,9 @@ INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id -- Dumping data for table public.study_sponsors_collaborators: -1 rows /*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; -- done -INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_first_name", "responsible_party_investigator_last_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name","collaborator_name", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Principal Investigator', 'Sean', 'West', 'Title 1', 'Wyman Inc', 'Kurtis Daniel', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Principal Investigator', 'Sean', 'East', 'Title 1', 'Medhurst Inc', 'Maiya Bartoletti', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_name", "responsible_party_investigator_title", 
"responsible_party_investigator_affiliation", "lead_sponsor_name","collaborator_name", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Principal Investigator', 'Sean West', 'Title 1', 'Wyman Inc', 'Kurtis Daniel', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Principal Investigator', 'Sean East', 'Title 1', 'Medhurst Inc', 'Maiya Bartoletti', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; -- Dumping data for table public.study_status: -1 rows @@ -368,17 +947,6 @@ INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", ('00000000-0000-0000-0000-000000000002', 'Suspended', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_status" ENABLE KEYS */; --- Dumping data for table public.user: -1 rows --- done -/*!40000 ALTER TABLE "user" DISABLE KEYS */; -INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES - ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', '2023-08-13 12:34:06', 'Schinner, Kuvalis and Beatty'), - ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', '2023-08-13 12:34:06', 'Schmitt Inc'), - ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', '2023-08-13 12:34:06', 'Stracke, Leuschke and Kuvalis'), - ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', '2023-08-13 
12:34:06', 'Heidenreich, Wilkinson and Mitchell'); - ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', '2023-08-13 12:34:06', 'Heaney, Russel and Turner'); -/*!40000 ALTER TABLE "user" ENABLE KEYS */; - -- Dumping data for table public.version: -1 rows -- done /*!40000 ALTER TABLE "version" DISABLE KEYS */; @@ -386,7 +954,7 @@ INSERT INTO "version" ("id", "title", "published", "changelog", "updated_on", "d ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); + ('00000000-0000-0000-0000-000000000004', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); /*!40000 ALTER TABLE "version" ENABLE KEYS */; -- Dumping data for table public.version_participants: -1 rows From b07cbea5a0bec9f48acc528de633a1e01b7d65ef Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 31 Aug 2023 16:51:08 -0700 Subject: [PATCH 065/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init=20data?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev-docker-compose.yaml 
| 2 +- sql/init.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 12dac0f2..cc3faab3 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -15,7 +15,7 @@ services: environment: FLASK_ENV: development FLASK_DEBUG: 1 - FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" + FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://admin:root@database:5432/fairhub_local}" database: image: postgres:9.6 environment: diff --git a/sql/init.sql b/sql/init.sql index 52bcb107..f4b4218d 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -650,7 +650,7 @@ INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES /*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES ('00000000-0000-0000-0000-000000000001', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000002'); + ('00000000-0000-0000-0000-000000000002', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; -- Dumping data for table public.dataset_alternate_identifier: 3 rows From 43d311d5574b4d3616f207c3b749a85f8ce33333 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 31 Aug 2023 17:02:14 -0700 Subject: [PATCH 066/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init=20data?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 18 ++++++++++++++++++ dev-docker-compose.yaml | 20 ++++++++++++++------ 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 301c05ba..98a45277 100644 --- a/README.md +++ b/README.md @@ -85,6 +85,24 @@ If you 
would like to update the api, please follow the instructions below. ## Docker +### Database + +The api uses a postgres database. You can run a postgres database locally using docker: + +```bash +docker-compose -f ./db-docker-compose.yml up +``` + +Close the database with: + +```bash +docker-compose -f ./db-docker-compose.yml down -v +``` + +This database will not persist data between runs. + +### API + If you would like to run the api locally, you can use docker. 1. Build the docker image: diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index cc3faab3..5763a078 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -15,16 +15,24 @@ services: environment: FLASK_ENV: development FLASK_DEBUG: 1 - FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://admin:root@database:5432/fairhub_local}" + FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://admin:root@localhost:5432/fairhub_local}" + depends_on: + database: + condition: service_healthy database: image: postgres:9.6 environment: - - POSTGRES_USER: admin - - POSTGRES_PASSWORD: root - - POSTGRES_DB: fairhub_local + POSTGRES_USER: admin + POSTGRES_PASSWORD: root + POSTGRES_DB: fairhub_local ports: - 5432:5432 restart: always + healthcheck: + test: "pg_isready --username=fairhub_local && psql --username=admin --list" + interval: 5s + timeout: 5s + retries: 5 volumes: - - ./postgres-data:/var/lib/postgresql/data - # - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql + # - ./postgres-data:/var/lib/postgresql/data + - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql From 40e774ed4b2a39789aeb8adec8c88ad1e199027b Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 31 Aug 2023 17:09:55 -0700 Subject: [PATCH 067/505] wip: delete dataset --- apis/dataset.py | 29 ++++++++++++++++++++++------- apis/study.py | 2 +- 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/apis/dataset.py b/apis/dataset.py index 86436b75..6ee452a6 100644 --- a/apis/dataset.py +++ 
b/apis/dataset.py @@ -62,6 +62,7 @@ def post(self, study_id): @api.response(400, "Validation Error") class DatasetResource(Resource): def put(self, study_id, dataset_id): + data = request.json data_obj = Dataset.query.get(dataset_id) data_obj.update(data) @@ -69,14 +70,28 @@ def put(self, study_id, dataset_id): return data_obj.to_dict() def delete(self, study_id, dataset_id): - data = request.json - study_obj = Study.query.get(study_id) - for dataset_ in study_obj.dataset: - db.session.delete(dataset_) - db.session.commit() - db.session.delete(study_obj) + data_obj = Dataset.query.get(dataset_id) + for version in data_obj.dataset_versions: + db.session.delete(version) + db.session.delete(data_obj) db.session.commit() - return Response(status=204) + return '', 204 + # + # + # delete_study = Study.query.get(study_id) + # for d in delete_study.dataset: + # for version in d.dataset_versions: + # version.participants.clear() + # for d in delete_study.dataset: + # for version in d .dataset_versions: + # db.session.delete(version) + # db.session.delete(d) + # for p in delete_study.participants: + # db.session.delete(p) + # db.session.delete(delete_study) + # db.session.commit() + # return "", 204 + # # def delete(self, study_id, dataset_id, version_id): # data_obj = Dataset.query.get(dataset_id) diff --git a/apis/study.py b/apis/study.py index 61803787..ec1ec5eb 100644 --- a/apis/study.py +++ b/apis/study.py @@ -71,7 +71,7 @@ def delete(self, study_id: int): for version in d.dataset_versions: version.participants.clear() for d in delete_study.dataset: - for version in d.dataset_versions: + for version in d .dataset_versions: db.session.delete(version) db.session.delete(d) for p in delete_study.participants: From 087c460de5858c219fd8782bdec6392fc9c9d48d Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 1 Sep 2023 10:14:49 -0700 Subject: [PATCH 068/505] feat: model classes are modified --- apis/study_metadata/study_arm.py | 2 +- db-docker-compose.yaml | 4 +-- 
model/study_contributor.py | 4 +-- model/study_metadata/study_contact.py | 2 +- model/study_metadata/study_design.py | 32 +++++++++++------------ model/study_metadata/study_eligibility.py | 4 +-- 6 files changed, 24 insertions(+), 24 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 559daaee..bc902ade 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -44,7 +44,7 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - +# todo delete # @api.route("/study//metadata/arm/") # class StudyArmUpdate(Resource): # def put(self, study_id: int, arm_id: int): diff --git a/db-docker-compose.yaml b/db-docker-compose.yaml index 49500c44..55588f95 100644 --- a/db-docker-compose.yaml +++ b/db-docker-compose.yaml @@ -1,7 +1,7 @@ version: '3' services: postgres: - image: postgres:9.6 + image: postgres:latest restart: always environment: POSTGRES_USER: admin @@ -10,7 +10,7 @@ services: ports: - 5432:5432 volumes: - # - ./postgres-data:/var/lib/postgresql/data + - ./postgres-data:/var/lib/postgresql/data - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql # pgadmin: # image: dpage/pgadmin4 diff --git a/model/study_contributor.py b/model/study_contributor.py index 29a629b0..36b00b27 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -10,9 +10,9 @@ def __init__(self): __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), primary_key=True) - user = db.relationship("User", back_populates="study_contributors") + user = db.relationship("User", back_populates="study_contributors", ) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) study = db.relationship("Study", back_populates="study_contributors") def to_dict(self): diff --git a/model/study_metadata/study_contact.py 
b/model/study_metadata/study_contact.py index f510d51b..0eb952a8 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -15,7 +15,7 @@ def __init__(self, study): first_name = db.Column(db.String, nullable=False) last_name = db.Column(db.String, nullable=False) affiliation = db.Column(db.String, nullable=False) - role = db.Column(db.String, nullable=False) + role = db.Column(db.String, nullable=True) phone = db.Column(db.String, nullable=False) phone_ext = db.Column(db.String, nullable=False) email_address = db.Column(db.String, nullable=False) diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 612e83e1..a921069d 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -15,26 +15,26 @@ def __init__(self, study): __tablename__ = "study_design" id = db.Column(db.CHAR(36), primary_key=True) - design_allocation = db.Column(db.String, nullable=False) + design_allocation = db.Column(db.String, nullable=True) study_type = db.Column(db.String, nullable=False) - design_interventional_model = db.Column(db.String, nullable=False) - design_intervention_model_description = db.Column(db.String, nullable=False) - design_primary_purpose = db.Column(db.String, nullable=False) - design_masking = db.Column(db.String, nullable=False) - design_masking_description = db.Column(db.String, nullable=False) - design_who_masked_list = db.Column(ARRAY(String), nullable=False) - phase_list = db.Column(ARRAY(String), nullable=False) + design_interventional_model = db.Column(db.String, nullable=True) + design_intervention_model_description = db.Column(db.String, nullable=True) + design_primary_purpose = db.Column(db.String, nullable=True) + design_masking = db.Column(db.String, nullable=True) + design_masking_description = db.Column(db.String, nullable=True) + design_who_masked_list = db.Column(ARRAY(String), nullable=True) + phase_list = db.Column(ARRAY(String), 
nullable=True) enrollment_count = db.Column(db.Integer, nullable=False) enrollment_type = db.Column(db.String, nullable=False) - number_arms = db.Column(db.Integer, nullable=False) - design_observational_model_list = db.Column(ARRAY(String), nullable=False) - design_time_perspective_list = db.Column(ARRAY(String), nullable=False) - bio_spec_retention = db.Column(db.String, nullable=False) - bio_spec_description = db.Column(db.String, nullable=False) - target_duration = db.Column(db.String, nullable=False) - number_groups_cohorts = db.Column(db.Integer, nullable=False) + number_arms = db.Column(db.Integer, nullable=True) + design_observational_model_list = db.Column(ARRAY(String), nullable=True) + design_time_perspective_list = db.Column(ARRAY(String), nullable=True) + bio_spec_retention = db.Column(db.String, nullable=True) + bio_spec_description = db.Column(db.String, nullable=True) + target_duration = db.Column(db.String, nullable=True) + number_groups_cohorts = db.Column(db.Integer, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=True) study = db.relationship("Study", back_populates="study_design") def to_dict(self): diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index e085fd1c..b6679576 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -24,8 +24,8 @@ def __init__(self, study): healthy_volunteers = db.Column(db.BOOLEAN, nullable=False) inclusion_criteria = db.Column(ARRAY(String), nullable=False) exclusion_criteria = db.Column(ARRAY(String), nullable=False) - study_population = db.Column(db.String, nullable=False) - sampling_method = db.Column(db.String, nullable=False) + study_population = db.Column(db.String, nullable=True) + sampling_method = db.Column(db.String, nullable=True) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = 
db.relationship("Study", back_populates="study_eligibility") From d89545d209ef1e7437bec3fc3e30fcf346dad920 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 1 Sep 2023 11:17:48 -0700 Subject: [PATCH 069/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20local=20d?= =?UTF-8?q?ocker?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev-docker-compose.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 5763a078..f4589595 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -5,7 +5,7 @@ services: context: . dockerfile: Dockerfile ports: - - "5000:5000" + - 5000:5000 # volumes: # - ./apis:/app/apis # - ./model:/app/model @@ -15,21 +15,21 @@ services: environment: FLASK_ENV: development FLASK_DEBUG: 1 - FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://admin:root@localhost:5432/fairhub_local}" + FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/postgres}" depends_on: database: condition: service_healthy database: image: postgres:9.6 environment: - POSTGRES_USER: admin - POSTGRES_PASSWORD: root - POSTGRES_DB: fairhub_local + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres ports: - 5432:5432 - restart: always + # restart: always healthcheck: - test: "pg_isready --username=fairhub_local && psql --username=admin --list" + test: "pg_isready --username=postgres && psql --username=postgres --list" interval: 5s timeout: 5s retries: 5 From 7ada8cb58f149f8b72d4422763f5b2a7f497da08 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 1 Sep 2023 12:16:26 -0700 Subject: [PATCH 070/505] fix: one to one responses fixed for Study metadata --- apis/study_metadata/study_description.py | 2 +- apis/study_metadata/study_eligibility.py | 12 +++++++----- apis/study_metadata/study_ipdsharing.py | 2 +- apis/study_metadata/study_other.py | 4 ++-- 
apis/study_metadata/study_sponsors_collaborators.py | 8 +++----- apis/study_metadata/study_status.py | 8 ++++---- model/study.py | 10 +++++----- model/study_metadata/study_eligibility.py | 2 +- 8 files changed, 24 insertions(+), 24 deletions(-) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index b3af6526..03840c40 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -26,7 +26,7 @@ class StudyDescriptionResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_description_ = study_.study_description - return [s.to_dict() for s in study_description_] + return study_description_.to_dict() def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 40b3e27d..79288e92 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -11,11 +11,13 @@ { "id": fields.String(required=True), "gender": fields.String(required=True), - "gender_based": fields.Boolean(required=True), + "gender_based": fields.String(required=True), "gender_description": fields.String(required=True), - "minimum_age": fields.String(required=True), - "maximum_age": fields.String(required=True), - "healthy_volunteers": fields.Boolean(required=True), + "minimum_age_value": fields.Integer(required=True), + "maximum_age_value": fields.Integer(required=True), + "minimum_age_unit": fields.String(required=True), + "maximum_age_unit": fields.String(required=True), + "healthy_volunteers": fields.String(required=True), "inclusion_criteria": fields.List(fields.String, required=True), "exclusion_criteria": fields.List(fields.String, required=True), "study_population": fields.String(required=True), @@ -34,7 +36,7 @@ class StudyEligibilityResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_eligibility_ = 
study_.study_eligibility - return [s.to_dict() for s in study_eligibility_] + return study_eligibility_.to_dict() def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index f1bd1c71..14acdef5 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -30,7 +30,7 @@ class StudyIpdsharingResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_ipdsharing_ = study_.study_ipdsharing - return [s.to_dict() for s in study_ipdsharing_] + return study_ipdsharing_.to_dict() def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 5a5b4e9b..0d1a0e22 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -12,7 +12,7 @@ "oversight_has_dmc": fields.String(required=True), "conditions": fields.String(required=True), "keywords": fields.String(required=True), - "size": fields.Integer(required=True), + "size": fields.String(required=True), }, ) @@ -27,7 +27,7 @@ class StudyOtherResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_other_ = study_.study_other - return [s.to_dict() for s in study_other_] + return study_other_.to_dict() def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 3e159174..588fd40f 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -11,12 +11,10 @@ { "id": fields.String(required=True), "responsible_party_type": fields.String(required=True), - "responsible_party_investigator_first_name": fields.String(required=True), - "responsible_party_investigator_last_name": fields.String(required=True), + "responsible_party_investigator_name": 
fields.String(required=True), "responsible_party_investigator_title": fields.String(required=True), "responsible_party_investigator_affiliation": fields.String(required=True), - "lead_sponsor_first_name": fields.String(required=True), - "lead_sponsor_last_name": fields.String(required=True), + "lead_sponsor_name": fields.String(required=True), "collaborator_name": fields.List(fields.String, required=True), }, ) @@ -32,7 +30,7 @@ class StudyStatusResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_sponsors_collaborators_ = study_.study_sponsors_collaborators - return [s.to_dict() for s in study_sponsors_collaborators_] + return study_sponsors_collaborators_.to_dict() def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index ea9173bc..58a9fd44 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -30,7 +30,7 @@ class StudyStatusResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_status_ = study_.study_status - return [s.to_dict() for s in study_status_] + return study_status_.to_dict() def post(self, study_id: int): data = request.json @@ -40,10 +40,10 @@ def post(self, study_id: int): db.session.commit() return study_status_.to_dict() - @api.route("/study//metadata/status/") + @api.route("/study//metadata/status/") class StudyStatusUpdate(Resource): - def put(self, study_id: int, study_status_id: int): - study_status_ = StudyStatus.query.get(study_status_id) + def put(self, study_id: int, status_id: int): + study_status_ = StudyStatus.query.get(status_id) study_status_.update(request.json) db.session.commit() return study_status_.to_dict() diff --git a/model/study.py b/model/study.py index 58d0cc6c..645cd12d 100644 --- a/model/study.py +++ b/model/study.py @@ -35,13 +35,13 @@ def __init__(self): study_description = db.relationship( "StudyDescription", uselist=False, 
back_populates="study" ) - study_design = db.relationship("StudyDesign", back_populates="study") - study_eligibility = db.relationship("StudyEligibility", back_populates="study") + study_design = db.relationship("StudyDesign", uselist=False, back_populates="study") + study_eligibility = db.relationship("StudyEligibility", uselist=False, back_populates="study") study_identification = db.relationship( "StudyIdentification", back_populates="study" ) study_intervention = db.relationship("StudyIntervention", back_populates="study") - study_ipdsharing = db.relationship("StudyIpdsharing", back_populates="study") + study_ipdsharing = db.relationship("StudyIpdsharing", uselist=False, back_populates="study") study_link = db.relationship("StudyLink", back_populates="study") study_location = db.relationship("StudyLocation", back_populates="study") study_other = db.relationship("StudyOther", uselist=False, back_populates="study") @@ -50,9 +50,9 @@ def __init__(self): ) study_reference = db.relationship("StudyReference", back_populates="study") study_sponsors_collaborators = db.relationship( - "StudySponsorsCollaborators", back_populates="study" + "StudySponsorsCollaborators", uselist=False, back_populates="study" ) - study_status = db.relationship("StudyStatus", back_populates="study") + study_status = db.relationship("StudyStatus", uselist=False, back_populates="study") def to_dict(self): """Converts the study to a dictionary""" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index b6679576..1270ea89 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -21,7 +21,7 @@ def __init__(self, study): maximum_age_value = db.Column(db.Integer, nullable=False) minimum_age_unit = db.Column(db.String, nullable=False) maximum_age_unit = db.Column(db.String, nullable=False) - healthy_volunteers = db.Column(db.BOOLEAN, nullable=False) + healthy_volunteers = db.Column(db.String, 
nullable=False) inclusion_criteria = db.Column(ARRAY(String), nullable=False) exclusion_criteria = db.Column(ARRAY(String), nullable=False) study_population = db.Column(db.String, nullable=True) From 1ef22a067d69701f6c73e144d32b92c4499d791f Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 1 Sep 2023 19:17:09 +0000 Subject: [PATCH 071/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dataset.py | 3 +-- apis/study.py | 2 +- apis/study_metadata/study_arm.py | 16 +++++++++------- model/study.py | 8 ++++++-- model/study_contributor.py | 5 ++++- 5 files changed, 21 insertions(+), 13 deletions(-) diff --git a/apis/dataset.py b/apis/dataset.py index 5b4b483b..0db1e2bf 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -62,7 +62,6 @@ def post(self, study_id): @api.response(400, "Validation Error") class DatasetResource(Resource): def put(self, study_id, dataset_id): - data = request.json data_obj = Dataset.query.get(dataset_id) data_obj.update(data) @@ -75,7 +74,7 @@ def delete(self, study_id, dataset_id): db.session.delete(version) db.session.delete(data_obj) db.session.commit() - return '', 204 + return "", 204 # # # delete_study = Study.query.get(study_id) diff --git a/apis/study.py b/apis/study.py index ec1ec5eb..61803787 100644 --- a/apis/study.py +++ b/apis/study.py @@ -71,7 +71,7 @@ def delete(self, study_id: int): for version in d.dataset_versions: version.participants.clear() for d in delete_study.dataset: - for version in d .dataset_versions: + for version in d.dataset_versions: db.session.delete(version) db.session.delete(d) for p in delete_study.participants: diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index bc902ade..a1a8745d 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -44,11 +44,13 @@ def post(self, study_id: int): 
db.session.commit() return list_of_elements + + # todo delete - # @api.route("/study//metadata/arm/") - # class StudyArmUpdate(Resource): - # def put(self, study_id: int, arm_id: int): - # study_arm_ = StudyArm.query.get(arm_id) - # study_arm_.update(request.json) - # db.session.commit() - # return study_arm_.to_dict() +# @api.route("/study//metadata/arm/") +# class StudyArmUpdate(Resource): +# def put(self, study_id: int, arm_id: int): +# study_arm_ = StudyArm.query.get(arm_id) +# study_arm_.update(request.json) +# db.session.commit() +# return study_arm_.to_dict() diff --git a/model/study.py b/model/study.py index 645cd12d..b1ac84a1 100644 --- a/model/study.py +++ b/model/study.py @@ -36,12 +36,16 @@ def __init__(self): "StudyDescription", uselist=False, back_populates="study" ) study_design = db.relationship("StudyDesign", uselist=False, back_populates="study") - study_eligibility = db.relationship("StudyEligibility", uselist=False, back_populates="study") + study_eligibility = db.relationship( + "StudyEligibility", uselist=False, back_populates="study" + ) study_identification = db.relationship( "StudyIdentification", back_populates="study" ) study_intervention = db.relationship("StudyIntervention", back_populates="study") - study_ipdsharing = db.relationship("StudyIpdsharing", uselist=False, back_populates="study") + study_ipdsharing = db.relationship( + "StudyIpdsharing", uselist=False, back_populates="study" + ) study_link = db.relationship("StudyLink", back_populates="study") study_location = db.relationship("StudyLocation", back_populates="study") study_other = db.relationship("StudyOther", uselist=False, back_populates="study") diff --git a/model/study_contributor.py b/model/study_contributor.py index 36b00b27..9ba13b88 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -10,7 +10,10 @@ def __init__(self): __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) user_id = db.Column(db.CHAR(36), 
db.ForeignKey("user.id"), primary_key=True) - user = db.relationship("User", back_populates="study_contributors", ) + user = db.relationship( + "User", + back_populates="study_contributors", + ) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) study = db.relationship("Study", back_populates="study_contributors") From 06574d42b313a3a7a7f94d36f8c5f2f10bb94986 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 1 Sep 2023 13:18:30 -0700 Subject: [PATCH 072/505] fix: study add and update endpoints --- apis/study_metadata/study_arm.py | 14 +++++++------- apis/study_metadata/study_intervention.py | 14 +++++++------- apis/study_metadata/study_location.py | 15 ++++++++------- model/study.py | 5 ++--- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index bc902ade..e814dbb9 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -45,10 +45,10 @@ def post(self, study_id: int): return list_of_elements # todo delete - # @api.route("/study//metadata/arm/") - # class StudyArmUpdate(Resource): - # def put(self, study_id: int, arm_id: int): - # study_arm_ = StudyArm.query.get(arm_id) - # study_arm_.update(request.json) - # db.session.commit() - # return study_arm_.to_dict() + @api.route("/study//metadata/arm/") + class StudyArmUpdate(Resource): + def delete(self, study_id: int, arm_id: int): + study_arm_ = StudyArm.query.get(arm_id) + db.session.delete(study_arm_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 421108a6..c0d0e8e7 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -48,10 +48,10 @@ def post(self, study_id: int): return list_of_elements - # @api.route("/study//metadata/available_ipd/") - # class StudyInterventionUpdate(Resource): - # def put(self, study_id: int, available_ipd_id: 
int): - # study_intervention_ = StudyIntervention.query.get(study_intervention_) - # study_intervention_.update(request.json) - # db.session.commit() - # return study_intervention_.to_dict() + @api.route("/study//metadata/intervention/") + class StudyInterventionUpdate(Resource): + def delete(self, study_id: int, intervention_id: int): + study_intervention_ = StudyIntervention.query.get(intervention_id) + db.session.delete(study_intervention_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index e8a7be5f..c53b6f02 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -48,10 +48,11 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/available_ipd/") - # class StudyLocationUpdate(Resource): - # def put(self, study_id: int, available_ipd_id: int): - # study_location_ = StudyLocation.query.get(study_location_) - # study_location_.update(request.json) - # db.session.commit() - # return study_location_.to_dict() + +@api.route("/study//metadata/location/") +class StudyLocationUpdate(Resource): + def delete(self, study_id: int, location_id: int): + study_location_ = StudyLocation.query.get(location_id) + db.session.delete(study_location_) + db.session.commit() + return 204 diff --git a/model/study.py b/model/study.py index 645cd12d..139c1494 100644 --- a/model/study.py +++ b/model/study.py @@ -12,7 +12,7 @@ class Study(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - # self.created_at = datetime.now() + self.created_at = datetime.now() __tablename__ = "study" @@ -81,8 +81,7 @@ def update(self, data): """Updates the study from a dictionary""" self.title = data["title"] self.image = data["image"] - # self.user = model.User.from_data(data["user"]) - self.updated_on = data["updated_on"] + self.updated_on = datetime.now() def validate(self): """Validates the study""" From 
1123b2e80ddf7f313e8fb45c94b336182bbcc86c Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 1 Sep 2023 13:23:43 -0700 Subject: [PATCH 073/505] fix: study add and update endpoints --- apis/study.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study.py b/apis/study.py index ec1ec5eb..61803787 100644 --- a/apis/study.py +++ b/apis/study.py @@ -71,7 +71,7 @@ def delete(self, study_id: int): for version in d.dataset_versions: version.participants.clear() for d in delete_study.dataset: - for version in d .dataset_versions: + for version in d.dataset_versions: db.session.delete(version) db.session.delete(d) for p in delete_study.participants: From e3fe74d90bfb5e0dfe02cf43260de4084c1bb35d Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 1 Sep 2023 20:25:10 +0000 Subject: [PATCH 074/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_arm.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index e814dbb9..06ed3302 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -44,7 +44,8 @@ def post(self, study_id: int): db.session.commit() return list_of_elements -# todo delete + + # todo delete @api.route("/study//metadata/arm/") class StudyArmUpdate(Resource): def delete(self, study_id: int, arm_id: int): From 61961da5e887de9b28c02d6ef0b881545e778720 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 1 Sep 2023 13:47:43 -0700 Subject: [PATCH 075/505] fix: added Upsert and delete to one to many objects --- apis/study_metadata/study_available_ipd.py | 14 +++++++------- apis/study_metadata/study_contact.py | 14 +++++++------- apis/study_metadata/study_identification.py | 14 +++++++------- apis/study_metadata/study_link.py | 14 +++++++------- 
apis/study_metadata/study_overall_official.py | 14 +++++++------- apis/study_metadata/study_reference.py | 14 +++++++------- 6 files changed, 42 insertions(+), 42 deletions(-) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 91adfdf5..d197d0be 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -43,10 +43,10 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/available_ipd/") - # class StudyAvailableIpdUpdate(Resource): - # def put(self, study_id: int, available_ipd_id: int): - # study_available_ipd_ = StudyAvailableIpd.query.get(available_ipd_id) - # study_available_ipd_.update(request.json) - # db.session.commit() - # return study_available_ipd_.to_dict() + @api.route("/study//metadata/available_ipd/") + class StudyAvailableIpdUpdate(Resource): + def put(self, study_id: int, available_ipd_id: int): + study_available_ipd_ = StudyAvailableIpd.query.get(available_ipd_id) + db.session.delete(study_available_ipd_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 6252ba1a..a476e11b 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -46,10 +46,10 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/arm/") - # class StudyArmUpdate(Resource): - # def put(self, study_id: int, arm_id: int): - # study_arm_ = StudyContact.query.get(arm_id) - # study_arm_.update(request.json) - # db.session.commit() - # return study_arm_.to_dict() + @api.route("/study//metadata/contact/") + class StudyContactUpdate(Resource): + def delete(self, study_id: int, contact_id: int): + study_contact_ = StudyContact.query.get(contact_id) + db.session.delete(study_contact_) + db.session.commit() + return study_contact_.to_dict() diff --git 
a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 5c85f782..485babbd 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -47,10 +47,10 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/available_ipd/") - # class StudyIdentificationdUpdate(Resource): - # def put(self, study_id: int, available_ipd_id: int): - # study_available_ipd_ = StudyIdentification.query.get(available_ipd_id) - # study_available_ipd_.update(request.json) - # db.session.commit() - # return study_available_ipd_.to_dict() + @api.route("/study//metadata/identification/") + class StudyIdentificationdUpdate(Resource): + def delete(self, study_id: int, identification_id: int): + study_identification_ = StudyIdentification.query.get(identification_id) + db.session.delete(study_identification_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index c55b3ea0..b30bb382 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -47,10 +47,10 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/available_ipd/") - # class StudyLinkUpdate(Resource): - # def put(self, study_id: int, available_ipd_id: int): - # study_link_ = StudyLink.query.get(study_link_) - # study_link_.update(request.json) - # db.session.commit() - # return study_intervention_.to_dict() + @api.route("/study//metadata/link/") + class StudyLinkUpdate(Resource): + def delete(self, study_id: int, link_id: int): + study_link_ = StudyLink.query.get(link_id) + db.session.delete(study_link_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index cc140b5e..2dbf53fb 100644 --- a/apis/study_metadata/study_overall_official.py +++ 
b/apis/study_metadata/study_overall_official.py @@ -46,10 +46,10 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/available_ipd/") - # class StudyOverallOfficialUpdate(Resource): - # def put(self, study_id: int, available_ipd_id: int): - # study_overall_official_ = StudyOverallOfficial.query.get(study_overall_official_) - # study_overall_official_.update(request.json) - # db.session.commit() - # return study_overall_official_.to_dict() + @api.route("/study//metadata/overall_official/") + class StudyOverallOfficialUpdate(Resource): + def delete(self, study_id: int, overall_official_id: int): + study_overall_official_ = StudyOverallOfficial.query.get(overall_official_id) + db.session.delete(study_overall_official_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 5cd8bb7f..fcb5e062 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -46,10 +46,10 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - # @api.route("/study//metadata/available_ipd/") - # class StudyReferenceUpdate(Resource): - # def put(self, study_id: int, available_ipd_id: int): - # study_location_ = StudyReference.query.get(study_location_) - # study_location_.update(request.json) - # db.session.commit() - # return study_location_.to_dict() + @api.route("/study//metadata/reference/") + class StudyReferenceUpdate(Resource): + def delete(self, study_id: int, reference_id: int): + study_reference_ = StudyReference.query.get(reference_id) + db.session.delete(study_reference_) + db.session.commit() + return 204 From a1ae55b89ba60db227d4be5dc6c35dafc78d8289 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 1 Sep 2023 20:49:56 +0000 Subject: [PATCH 076/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_overall_official.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 2dbf53fb..4b12c35f 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -49,7 +49,9 @@ def post(self, study_id: int): @api.route("/study//metadata/overall_official/") class StudyOverallOfficialUpdate(Resource): def delete(self, study_id: int, overall_official_id: int): - study_overall_official_ = StudyOverallOfficial.query.get(overall_official_id) + study_overall_official_ = StudyOverallOfficial.query.get( + overall_official_id + ) db.session.delete(study_overall_official_) db.session.commit() return 204 From c792ec8daef8959bb6b3f7ea1d41f7bdef02ef44 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 1 Sep 2023 14:04:11 -0700 Subject: [PATCH 077/505] fix: study metadata one to one added post functions --- apis/study_metadata/study_description.py | 14 ++++++------ apis/study_metadata/study_design.py | 16 +++++++------- apis/study_metadata/study_eligibility.py | 14 ++++++------ apis/study_metadata/study_ipdsharing.py | 14 ++++++------ apis/study_metadata/study_other.py | 14 ++++++------ .../study_sponsors_collaborators.py | 22 +++++++++---------- apis/study_metadata/study_status.py | 14 ++++++------ 7 files changed, 54 insertions(+), 54 deletions(-) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 03840c40..e6b1a249 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -36,10 +36,10 @@ def post(self, study_id: int): db.session.commit() return study_description_.to_dict() - @api.route("/study//metadata/description/") - class StudyDescriptionUpdate(Resource): - def put(self, study_id: int, description_id: int): - study_description_ = 
StudyDescription.query.get(description_id) - study_description_.update(request.json) - db.session.commit() - return study_description_.to_dict() + # @api.route("/study//metadata/description/") + # class StudyDescriptionUpdate(Resource): + # def put(self, study_id: int, description_id: int): + # study_description_ = StudyDescription.query.get(description_id) + # study_description_.update(request.json) + # db.session.commit() + # return study_description_.to_dict() diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index e9c5b431..4ceb5e90 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -41,7 +41,7 @@ class StudyDesignResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_design_ = study_.study_design - return [s.to_dict() for s in study_design_] + return study_design_.to_dict() def post(self, study_id: int): data = request.json @@ -51,10 +51,10 @@ def post(self, study_id: int): db.session.commit() return study_design_.to_dict() - @api.route("/study//metadata/design/") - class StudyDesignUpdate(Resource): - def put(self, study_id: int, design_id: int): - study_design_ = StudyDesign.query.get(design_id) - study_design_.update(request.json) - db.session.commit() - return study_design_.to_dict() + # @api.route("/study//metadata/design/") + # class StudyDesignUpdate(Resource): + # def put(self, study_id: int, design_id: int): + # study_design_ = StudyDesign.query.get(design_id) + # study_design_.update(request.json) + # db.session.commit() + # return study_design_.to_dict() diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 79288e92..23b05ad7 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -46,10 +46,10 @@ def post(self, study_id: int): db.session.commit() return study_eligibility_.to_dict() - @api.route("/study//metadata/eligibility/") - class 
StudyArmUpdate(Resource): - def put(self, study_id: int, eligibility_id: int): - study_eligibility_ = StudyEligibility.query.get(eligibility_id) - study_eligibility_.update(request.json) - db.session.commit() - return study_eligibility_.to_dict() + # @api.route("/study//metadata/eligibility/") + # class StudyArmUpdate(Resource): + # def put(self, study_id: int, eligibility_id: int): + # study_eligibility_ = StudyEligibility.query.get(eligibility_id) + # study_eligibility_.update(request.json) + # db.session.commit() + # return study_eligibility_.to_dict() diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 14acdef5..6cc86be9 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -40,10 +40,10 @@ def post(self, study_id: int): db.session.commit() return study_ipdsharing_.to_dict() - @api.route("/study//metadata/ipdsharing/") - class StudyIpdsharingUpdate(Resource): - def put(self, study_id: int, study_ipdsharing_id: int): - study_ipdsharing_ = StudyIpdsharing.query.get(study_ipdsharing_id) - study_ipdsharing_.update(request.json) - db.session.commit() - return study_ipdsharing_.to_dict() + # @api.route("/study//metadata/ipdsharing/") + # class StudyIpdsharingUpdate(Resource): + # def put(self, study_id: int, study_ipdsharing_id: int): + # study_ipdsharing_ = StudyIpdsharing.query.get(study_ipdsharing_id) + # study_ipdsharing_.update(request.json) + # db.session.commit() + # return study_ipdsharing_.to_dict() diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 0d1a0e22..39641deb 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -37,10 +37,10 @@ def post(self, study_id: int): db.session.commit() return study_other_.to_dict() - @api.route("/study//metadata/other/") - class StudyOtherUpdate(Resource): - def put(self, study_id: int, other_id: int): - study_other_ = StudyOther.query.get(other_id) - 
study_other_.update(request.json) - db.session.commit() - return study_other_.to_dict() + # @api.route("/study//metadata/other/") + # class StudyOtherUpdate(Resource): + # def put(self, study_id: int, other_id: int): + # study_other_ = StudyOther.query.get(other_id) + # study_other_.update(request.json) + # db.session.commit() + # return study_other_.to_dict() diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 588fd40f..cd41ab41 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -42,14 +42,14 @@ def post(self, study_id: int): db.session.commit() return study_sponsors_collaborators_.to_dict() - @api.route( - "/study//metadata/sponsors_collaborators/" - ) - class StudySponsorsCollaboratorsUpdate(Resource): - def put(self, study_id: int, sponsors_collaborators_id: int): - study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( - sponsors_collaborators_id - ) - study_sponsors_collaborators_.update(request.json) - db.session.commit() - return study_sponsors_collaborators_.to_dict() + # @api.route( + # "/study//metadata/sponsors_collaborators/" + # ) + # class StudySponsorsCollaboratorsUpdate(Resource): + # def put(self, study_id: int, sponsors_collaborators_id: int): + # study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( + # sponsors_collaborators_id + # ) + # study_sponsors_collaborators_.update(request.json) + # db.session.commit() + # return study_sponsors_collaborators_.to_dict() diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 58a9fd44..2ba9e3f5 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -40,10 +40,10 @@ def post(self, study_id: int): db.session.commit() return study_status_.to_dict() - @api.route("/study//metadata/status/") - class StudyStatusUpdate(Resource): - def put(self, study_id: int, 
status_id: int): - study_status_ = StudyStatus.query.get(status_id) - study_status_.update(request.json) - db.session.commit() - return study_status_.to_dict() + # @api.route("/study//metadata/status/") + # class StudyStatusUpdate(Resource): + # def put(self, study_id: int, status_id: int): + # study_status_ = StudyStatus.query.get(status_id) + # study_status_.update(request.json) + # db.session.commit() + # return study_status_.to_dict() From 82c258cdb2a96eb2744f63fca62747b14cb0f522 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 1 Sep 2023 16:53:29 -0700 Subject: [PATCH 078/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- sql/init.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/init.sql b/sql/init.sql index f4b4218d..fced9d4b 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -386,7 +386,7 @@ CREATE TABLE IF NOT EXISTS "study_design" ( "id" CHAR(36) NOT NULL, "design_allocation" VARCHAR, "study_type" VARCHAR NOT NULL, - "design_interventional_model" VARCHAR, + "design_intervention_model" VARCHAR, "design_intervention_model_description" VARCHAR, "design_primary_purpose" VARCHAR, "design_masking" VARCHAR, @@ -847,7 +847,7 @@ INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", -- Dumping data for table public.study_design: -1 rows -- done /*!40000 ALTER TABLE "study_design" DISABLE KEYS */; -INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_interventional_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES +INSERT INTO 
"study_design" ("id", "design_allocation", "study_type", "design_intervention_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES ('00000000-0000-0000-0000-000000000001', 'Randomized', 'Interventional', 'Treatment', 'description', 'Single Group Assignment', 'Single', 'description', ARRAY ['Participant'], ARRAY ['Phase 1'], 20, 'Actual', 30, NULL, NULL, NULL, NULL, NULL, NULL, '00000000-0000-0000-0000-000000000001'), ('00000000-0000-0000-0000-000000000002', NULL, 'Observational', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 20, 'Actual', NULL, ARRAY ['Cohort'], ARRAY ['Retrospective'], 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_design" ENABLE KEYS */; From 45e645a0c3832d659690550408a0e4f2b7dff27e Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 1 Sep 2023 19:05:30 -0700 Subject: [PATCH 079/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- sql/init.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/init.sql b/sql/init.sql index fced9d4b..0b8abd3d 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -407,13 +407,13 @@ CREATE TABLE IF NOT EXISTS "study_design" ( CONSTRAINT "study_design_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); --- Dumping structure for table public.study_eligibility +-- Dumping structure for table public.study_eligibilitya CREATE TABLE IF NOT EXISTS "study_eligibility" ( "id" CHAR(36) NOT NULL, "gender" VARCHAR NOT NULL, 
"gender_based" VARCHAR NOT NULL, "gender_description" VARCHAR NOT NULL, - "healthy_volunteers" BOOLEAN NOT NULL, + "healthy_volunteers" VARCHAR NOT NULL, "inclusion_criteria" VARCHAR[] NOT NULL, "exclusion_criteria" VARCHAR[] NOT NULL, "study_population" VARCHAR, From 046d719682ed0533d4ae91d0d50f4520e1c80e95 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 1 Sep 2023 19:38:06 -0700 Subject: [PATCH 080/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- sql/init.sql | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/sql/init.sql b/sql/init.sql index 0b8abd3d..b17001c1 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -358,8 +358,7 @@ CREATE TABLE IF NOT EXISTS "study_available_ipd" ( -- Dumping structure for table public.study_contact CREATE TABLE IF NOT EXISTS "study_contact" ( "id" CHAR(36) NOT NULL, - "first_name" VARCHAR NOT NULL, - "last_name" VARCHAR NOT NULL, + "name" VARCHAR NOT NULL, "affiliation" VARCHAR NOT NULL, "role" VARCHAR, "phone" VARCHAR NOT NULL, @@ -828,11 +827,11 @@ INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", -- Dumping data for table public.study_contact: -1 rows -- done /*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; -INSERT INTO "study_contact" ("id", "first_name", "last_name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Johnston', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Rolfson', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Verner', 'Nolan', 'Monahan and Sons', NULL, '501-039-841', '', 
'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'Lela', 'Cormier', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_contact" ("id", "name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Dejah Johnston', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Reanna Rolfson', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Verner Nolan', 'Monahan and Sons', NULL, '501-039-841', '', 'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'Lela Cormier', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; -- Dumping data for table public.study_description: -1 rows From 1f58411d95275b903e907334678103c8d3513f42 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 1 Sep 2023 19:49:38 -0700 Subject: [PATCH 081/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- sql/init.sql | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/sql/init.sql b/sql/init.sql index b17001c1..b3845c1d 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -505,8 +505,7 @@ CREATE TABLE IF NOT EXISTS "study_other" ( -- Dumping structure for table public.study_overall_official CREATE TABLE IF NOT EXISTS "study_overall_official" ( "id" CHAR(36) NOT NULL, - "first_name" VARCHAR NOT NULL, - "last_name" VARCHAR NOT NULL, + "name" VARCHAR NOT NULL, 
"affiliation" VARCHAR NOT NULL, "role" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, @@ -915,10 +914,10 @@ INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", -- Dumping data for table public.study_overall_official: -1 rows -- done /*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; -INSERT INTO "study_overall_official" ("id", "first_name", "last_name", "affiliation", "role", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Bashirian', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Grady', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Maiya', 'Bartoletti', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Zoey Bashirian', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Ashlynn Grady', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Maiya Bartoletti', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; -- Dumping data for table public.study_reference: 6 rows From 426a6ebc32328853ba7ac712ed4116b218bc96b7 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sat, 2 Sep 2023 23:14:39 -0700 Subject: [PATCH 082/505] fix: update metadata/overall_official endpoint to metadata/overall-official --- apis/study_metadata/study_overall_official.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 4b12c35f..50bc0bd8 100644 --- 
a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -18,7 +18,7 @@ ) -@api.route("/study//metadata/overall_official") +@api.route("/study//metadata/overall-official") class StudyOverallOfficialResource(Resource): @api.doc("overall_official") @api.response(200, "Success") @@ -46,7 +46,7 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - @api.route("/study//metadata/overall_official/") + @api.route("/study//metadata/overall-official/") class StudyOverallOfficialUpdate(Resource): def delete(self, study_id: int, overall_official_id: int): study_overall_official_ = StudyOverallOfficial.query.get( From 8ed98e77008aa8bdab5f6785a63a9ad7fcc1db35 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sat, 2 Sep 2023 23:34:58 -0700 Subject: [PATCH 083/505] fix: remove first&last name replace with name in STUDY_OVERALL_OFFICIAL --- apis/study_metadata/study_overall_official.py | 3 +-- model/study_metadata/study_overall_official.py | 9 +++------ 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 50bc0bd8..f63a071f 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -10,8 +10,7 @@ "StudyOverallOfficial", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), + "name": fields.String(required=True), "affiliation": fields.String(required=True), "role": fields.String(required=True), }, diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index a8d3d12d..0f056f7b 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -12,8 +12,7 @@ def __init__(self, study): __tablename__ = "study_overall_official" id = db.Column(db.CHAR(36), primary_key=True) - first_name = 
db.Column(db.String, nullable=False) - last_name = db.Column(db.String, nullable=False) + name = db.Column(db.String, nullable=False) affiliation = db.Column(db.String, nullable=False) role = db.Column(db.String, nullable=False) @@ -24,8 +23,7 @@ def to_dict(self): """Converts the study to a dictionary""" return { "id": self.id, - "first_name": self.first_name, - "last_name": self.last_name, + "name": self.name, "affiliation": self.affiliation, "role": self.role, } @@ -40,8 +38,7 @@ def from_data(study, data: dict): def update(self, data): """Updates the study from a dictionary""" - self.first_name = data["first_name"] - self.last_name = data["last_name"] + self.name = data["name"] self.affiliation = data["affiliation"] self.role = data["role"] From 72a9a80099e318a06ceb86a2ec5025fcc4dbec1a Mon Sep 17 00:00:00 2001 From: aydawka Date: Sat, 2 Sep 2023 23:39:16 -0700 Subject: [PATCH 084/505] fix: replace first & last name with name in STUDY_CONTACT --- apis/study_metadata/study_contact.py | 3 +-- model/study_metadata/study_contact.py | 9 +++------ 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index a476e11b..5f8d9006 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -7,8 +7,7 @@ "StudyContact", { "id": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), + "name": fields.String(required=True), "affiliation": fields.String(required=True), "role": fields.String(required=True), "phone": fields.String(required=True), diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 0eb952a8..f4dbed72 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -12,8 +12,7 @@ def __init__(self, study): __tablename__ = "study_contact" id = db.Column(db.CHAR(36), primary_key=True) - first_name = 
db.Column(db.String, nullable=False) - last_name = db.Column(db.String, nullable=False) + name = db.Column(db.String, nullable=False) affiliation = db.Column(db.String, nullable=False) role = db.Column(db.String, nullable=True) phone = db.Column(db.String, nullable=False) @@ -28,8 +27,7 @@ def to_dict(self): """Converts the study to a dictionary""" return { "id": self.id, - "first_name": self.first_name, - "last_name": self.last_name, + "name": self.name, "affiliation": self.affiliation, "role": self.role, "phone": self.phone, @@ -48,8 +46,7 @@ def from_data(study, data: dict): def update(self, data): """Updates the study from a dictionary""" - self.first_name = data["first_name"] - self.last_name = data["last_name"] + self.name = data["name"] self.affiliation = data["affiliation"] self.role = data["role"] self.phone = data["phone"] From ccd1aea7d4e98ed0e03fca77a5f5cab4954ccae1 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sat, 2 Sep 2023 23:41:49 -0700 Subject: [PATCH 085/505] fix: remove collaborators from endpoint --- .../study_sponsors_collaborators.py | 44 +++++++++---------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index cd41ab41..fcf60581 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -20,7 +20,7 @@ ) -@api.route("/study//metadata/sponsors_collaborators") +@api.route("/study//metadata/sponsors") class StudyStatusResource(Resource): @api.doc("sponsors_collaborators") @api.response(200, "Success") @@ -32,24 +32,24 @@ def get(self, study_id: int): study_sponsors_collaborators_ = study_.study_sponsors_collaborators return study_sponsors_collaborators_.to_dict() - def post(self, study_id: int): - data = request.json - study_sponsors_collaborators_ = Study.query.get(study_id) - study_sponsors_collaborators_ = StudySponsorsCollaborators.from_data( - 
study_sponsors_collaborators_, data - ) - db.session.add(study_sponsors_collaborators_) - db.session.commit() - return study_sponsors_collaborators_.to_dict() - - # @api.route( - # "/study//metadata/sponsors_collaborators/" - # ) - # class StudySponsorsCollaboratorsUpdate(Resource): - # def put(self, study_id: int, sponsors_collaborators_id: int): - # study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( - # sponsors_collaborators_id - # ) - # study_sponsors_collaborators_.update(request.json) - # db.session.commit() - # return study_sponsors_collaborators_.to_dict() + # def post(self, study_id: int): + # data = request.json + # study_sponsors_collaborators_ = Study.query.get(study_id) + # study_sponsors_collaborators_ = StudySponsorsCollaborators.from_data( + # study_sponsors_collaborators_, data + # ) + # db.session.add(study_sponsors_collaborators_) + # db.session.commit() + # return study_sponsors_collaborators_.to_dict() + + @api.route( + "/study//metadata/sponsors_collaborators/" + ) + class StudySponsorsCollaboratorsUpdate(Resource): + def put(self, study_id: int, sponsors_collaborators_id: int): + study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( + sponsors_collaborators_id + ) + study_sponsors_collaborators_.update(request.json) + db.session.commit() + return study_sponsors_collaborators_.to_dict() From 5d3cad7c337451adf19758fffb8a1b36eae8e7b7 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 3 Sep 2023 00:05:29 -0700 Subject: [PATCH 086/505] fix: return design_who_masked_list as array in study GET metadata/design --- apis/study_metadata/study_design.py | 3 +-- model/study_metadata/study_design.py | 11 +++++------ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 4ceb5e90..5e873197 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -36,8 +36,7 @@ class StudyDesignResource(Resource): 
@api.doc("design") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") - # @api.marshal_with(study_design) + @api.marshal_with(study_design) def get(self, study_id: int): study_ = Study.query.get(study_id) study_design_ = study_.study_design diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index a921069d..1d30191e 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -43,24 +43,23 @@ def to_dict(self): "id": self.id, "design_allocation": self.design_allocation, "study_type": self.study_type, - "design_interventional_model": str(self.design_interventional_model), + "design_interventional_model": self.design_interventional_model, "design_intervention_model_description": self.design_intervention_model_description, "design_primary_purpose": self.design_primary_purpose, "design_masking": self.design_masking, "design_masking_description": self.design_masking_description, - "design_who_masked_list": str(self.design_who_masked_list), + "design_who_masked_list": self.design_who_masked_list, "phase_list": self.phase_list, "enrollment_count": self.enrollment_count, "enrollment_type": self.enrollment_type, "number_arms": self.number_arms, - "design_observational_model_list": str( - self.design_observational_model_list - ), + "design_observational_model_list": + self.design_observational_model_list, "design_time_perspective_list": self.design_time_perspective_list, "bio_spec_retention": self.bio_spec_retention, "bio_spec_description": self.bio_spec_description, "target_duration": self.target_duration, - "number_groups_cohorts": str(self.number_groups_cohorts), + "number_groups_cohorts": self.number_groups_cohorts, } @staticmethod From 92e237532daf1718ae07430d21ebe5da2af8ba83 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 3 Sep 2023 00:11:38 -0700 Subject: [PATCH 087/505] fix: design_interventional_model to design_intervention_model 
in STUDY_DESIGN --- apis/study_metadata/study_design.py | 2 +- model/study_metadata/study_design.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 5e873197..74a4f134 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -11,7 +11,7 @@ "id": fields.String(required=True), "design_allocation": fields.String(required=True), "study_type": fields.String(required=True), - "design_interventional_model": fields.String(required=True), + "design_intervention_model": fields.String(required=True), "design_intervention_model_description": fields.String(required=True), "design_primary_purpose": fields.String(required=True), "design_masking": fields.String(required=True), diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 1d30191e..80d20993 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -17,7 +17,7 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) design_allocation = db.Column(db.String, nullable=True) study_type = db.Column(db.String, nullable=False) - design_interventional_model = db.Column(db.String, nullable=True) + design_intervention_model = db.Column(db.String, nullable=True) design_intervention_model_description = db.Column(db.String, nullable=True) design_primary_purpose = db.Column(db.String, nullable=True) design_masking = db.Column(db.String, nullable=True) @@ -43,7 +43,7 @@ def to_dict(self): "id": self.id, "design_allocation": self.design_allocation, "study_type": self.study_type, - "design_interventional_model": self.design_interventional_model, + "design_intervention_model": self.design_intervention_model, "design_intervention_model_description": self.design_intervention_model_description, "design_primary_purpose": self.design_primary_purpose, "design_masking": self.design_masking, @@ -74,7 +74,7 @@ def 
update(self, data): """Updates the study from a dictionary""" self.design_allocation = data["design_allocation"] self.study_type = data["study_type"] - self.design_interventional_model = data["design_interventional_model"] + self.design_intervention_model = data["design_intervention_model"] self.design_intervention_model_description = data[ "design_intervention_model_description" ] From 1ea7c1bfece66850211e5e33e2e7bcdaef096cb2 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 3 Sep 2023 00:43:22 -0700 Subject: [PATCH 088/505] fix: added study_type to GET metadata/eligibility from STUDY_DESIGN table --- apis/study_metadata/study_eligibility.py | 2 ++ model/study_metadata/study_eligibility.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 23b05ad7..d50ffb90 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -22,6 +22,8 @@ "exclusion_criteria": fields.List(fields.String, required=True), "study_population": fields.String(required=True), "sampling_method": fields.String(required=True), + "study_type": fields.String(required=True), + }, ) diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 1270ea89..38b19566 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -46,6 +46,8 @@ def to_dict(self): "exclusion_criteria": self.exclusion_criteria, "study_population": self.study_population, "sampling_method": self.sampling_method, + "study_type": self.study.study_design.study_type if self.study.study_design else None + } @staticmethod From 8c599d1177db64a28120095f2f0b0d568a34fe21 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sun, 3 Sep 2023 07:44:02 +0000 Subject: [PATCH 089/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_eligibility.py | 1 - model/study_metadata/study_design.py | 3 +-- model/study_metadata/study_eligibility.py | 5 +++-- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index d50ffb90..2d678ac0 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -23,7 +23,6 @@ "study_population": fields.String(required=True), "sampling_method": fields.String(required=True), "study_type": fields.String(required=True), - }, ) diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 80d20993..83f3c335 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -53,8 +53,7 @@ def to_dict(self): "enrollment_count": self.enrollment_count, "enrollment_type": self.enrollment_type, "number_arms": self.number_arms, - "design_observational_model_list": - self.design_observational_model_list, + "design_observational_model_list": self.design_observational_model_list, "design_time_perspective_list": self.design_time_perspective_list, "bio_spec_retention": self.bio_spec_retention, "bio_spec_description": self.bio_spec_description, diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 38b19566..8017f967 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -46,8 +46,9 @@ def to_dict(self): "exclusion_criteria": self.exclusion_criteria, "study_population": self.study_population, "sampling_method": self.sampling_method, - "study_type": self.study.study_design.study_type if self.study.study_design else None - + "study_type": self.study.study_design.study_type + if self.study.study_design + else None, } @staticmethod From ede4de6451c79ae19ce153e30057463923b36777 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 3 Sep 2023 
00:48:41 -0700 Subject: [PATCH 090/505] fix: changed from study/contact to study/central-contact --- apis/study_metadata/study_contact.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 5f8d9006..f185f3f7 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -18,7 +18,7 @@ ) -@api.route("/study//metadata/contact") +@api.route("/study//metadata/central-contact") class StudyContactResource(Resource): @api.doc("contact") @api.response(200, "Success") @@ -45,10 +45,10 @@ def post(self, study_id: int): db.session.commit() return list_of_elements - @api.route("/study//metadata/contact/") + @api.route("/study//metadata/central-contact/") class StudyContactUpdate(Resource): - def delete(self, study_id: int, contact_id: int): - study_contact_ = StudyContact.query.get(contact_id) + def delete(self, study_id: int, central_contact_id: int): + study_contact_ = StudyContact.query.get(central_contact_id) db.session.delete(study_contact_) db.session.commit() return study_contact_.to_dict() From 5016f526fa67d0d95fdbdc82ae5f4169f2e1a0d0 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 3 Sep 2023 14:12:54 -0700 Subject: [PATCH 091/505] feat: create a collab_name list from STUDY_SPONSORS_COLLABORATORS table --- .../study_sponsors_collaborators.py | 60 +++++++++++++------ .../study_sponsors_collaborators.py | 15 ++++- 2 files changed, 55 insertions(+), 20 deletions(-) diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index fcf60581..c0bae6e7 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -6,8 +6,8 @@ from apis.study_metadata_namespace import api -study_sponsors_collaborators = api.model( - "StudySponsorsCollaborators", +study_sponsors = api.model( + "StudySponsors", { "id": 
fields.String(required=True), "responsible_party_type": fields.String(required=True), @@ -15,40 +15,62 @@ "responsible_party_investigator_title": fields.String(required=True), "responsible_party_investigator_affiliation": fields.String(required=True), "lead_sponsor_name": fields.String(required=True), - "collaborator_name": fields.List(fields.String, required=True), }, ) +study_collaborators = api.model( + "StudyCollaborators", + { + "collaborator_name": fields.List(fields.String, required=True), + }, +) + @api.route("/study//metadata/sponsors") -class StudyStatusResource(Resource): - @api.doc("sponsors_collaborators") +class StudySponsorsResource(Resource): + @api.doc("sponsors") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - @api.marshal_with(study_sponsors_collaborators) + @api.marshal_with(study_sponsors) def get(self, study_id: int): study_ = Study.query.get(study_id) study_sponsors_collaborators_ = study_.study_sponsors_collaborators return study_sponsors_collaborators_.to_dict() - # def post(self, study_id: int): - # data = request.json - # study_sponsors_collaborators_ = Study.query.get(study_id) - # study_sponsors_collaborators_ = StudySponsorsCollaborators.from_data( - # study_sponsors_collaborators_, data - # ) - # db.session.add(study_sponsors_collaborators_) - # db.session.commit() - # return study_sponsors_collaborators_.to_dict() @api.route( - "/study//metadata/sponsors_collaborators/" + "/study//metadata/sponsors_collaborators/" + ) + class StudySponsorsUpdate(Resource): + def put(self, study_id: int, sponsors_id: int): + study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( + sponsors_id + ) + study_sponsors_collaborators_.update(request.json) + db.session.commit() + return study_sponsors_collaborators_.to_dict() + + +@api.route("/study//metadata/collaborators") +class StudyCollaboratorsResource(Resource): + @api.doc("collaborators") + @api.response(200, "Success") + 
@api.response(400, "Validation Error") + # @api.param("id", "The study identifier") + # @api.marshal_with(study_collaborators) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_sponsors_collaborators_ = study_.study_sponsors_collaborators.collaborator_name + return study_sponsors_collaborators_ + + @api.route( + "/study//metadata/collaborators/" ) - class StudySponsorsCollaboratorsUpdate(Resource): - def put(self, study_id: int, sponsors_collaborators_id: int): + class StudyCollaboratorsUpdate(Resource): + def put(self, study_id: int, collaborators_id: int): study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( - sponsors_collaborators_id + collaborators_id ) study_sponsors_collaborators_.update(request.json) db.session.commit() diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 5234d949..52aae4e0 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -33,7 +33,7 @@ def to_dict(self): "responsible_party_investigator_title": self.responsible_party_investigator_title, "responsible_party_investigator_affiliation": self.responsible_party_investigator_affiliation, "lead_sponsor_name": self.lead_sponsor_name, - "collaborator_name": self.collaborator_name, + } @staticmethod @@ -60,6 +60,19 @@ def update(self, data): self.lead_sponsor_name = data["lead_sponsor_name"] self.collaborator_name = data["collaborator_name"] + + @staticmethod + def from_data_(study, data: dict): + """Creates a new study from a dictionary""" + study_sponsors_collaborators = StudySponsorsCollaborators(study) + study_sponsors_collaborators.update(data) + + return study_sponsors_collaborators + + def update_collaborators(self, data): + """Updates the study from a dictionary""" + self.collaborator_name = data["collaborator_name"] + def validate(self): """Validates the lead_sponsor_last_name study""" violations = 
[] From 9791170b8e85f57293b4e0ee787cc60c269fa454 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 3 Sep 2023 14:20:50 -0700 Subject: [PATCH 092/505] feat: create aN oversight_has_dmc prop from STUDY_OTHER table --- apis/study_metadata/study_other.py | 23 +++++++++++++++++++ .../study_sponsors_collaborators.py | 4 ++-- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 39641deb..502bd584 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -44,3 +44,26 @@ def post(self, study_id: int): # study_other_.update(request.json) # db.session.commit() # return study_other_.to_dict() + + + +@api.route("/study//metadata/oversight") +class StudyOversightResource(Resource): + @api.doc("other") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.param("id", "The study identifier") + # @api.marshal_with(study_other) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_other_ = study_.study_other.oversight_has_dmc + return {"oversight_has_dmc": study_other_} + + def post(self, study_id: int): + data = request.json + study_other_ = Study.query.get(study_id) + study_other_ = StudyOther.from_data(study_other_, data) + db.session.add(study_other_) + db.session.commit() + return study_other_.to_dict() + diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index c0bae6e7..42634707 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -61,8 +61,8 @@ class StudyCollaboratorsResource(Resource): # @api.marshal_with(study_collaborators) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_sponsors_collaborators_ = study_.study_sponsors_collaborators.collaborator_name - return study_sponsors_collaborators_ + study_collaborators_ = 
study_.study_sponsors_collaborators.collaborator_name + return {"collaborator_name": study_collaborators_} @api.route( "/study//metadata/collaborators/" From 9789c0171d378992197e612c421e75e4cd7a426a Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 3 Sep 2023 14:24:41 -0700 Subject: [PATCH 093/505] feat: create a conditions list from STUDY_other table --- apis/study_metadata/study_other.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 502bd584..db8333e8 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -56,10 +56,10 @@ class StudyOversightResource(Resource): # @api.marshal_with(study_other) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_other_ = study_.study_other.oversight_has_dmc - return {"oversight_has_dmc": study_other_} + study_oversight_has_dmc = study_.study_other.oversight_has_dmc + return {"oversight_has_dmc": study_oversight_has_dmc} - def post(self, study_id: int): + def put(self, study_id: int): data = request.json study_other_ = Study.query.get(study_id) study_other_ = StudyOther.from_data(study_other_, data) @@ -67,3 +67,23 @@ def post(self, study_id: int): db.session.commit() return study_other_.to_dict() + +@api.route("/study//metadata/conditions") +class StudyOversightResource(Resource): + @api.doc("other") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.param("id", "The study identifier") + # @api.marshal_with(study_other) + def get(self, study_id: int): + study_ = Study.query.get(study_id) + study_other_conditions = study_.study_other.conditions + return {"conditions": study_other_conditions} + + def put(self, study_id: int): + data = request.json + study_other_ = Study.query.get(study_id) + study_other_ = StudyOther.from_data(study_other_, data) + db.session.add(study_other_) + db.session.commit() + return 
study_other_.to_dict() From 88b448450dca89f4c38f6974dc3bb49ac28bc630 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 4 Sep 2023 09:42:55 -0700 Subject: [PATCH 094/505] fix: converted POST to PUT in status --- apis/study_metadata/study_status.py | 10 ++++------ model/study.py | 19 +++++++++++-------- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 2ba9e3f5..32c8cd48 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -32,13 +32,11 @@ def get(self, study_id: int): study_status_ = study_.study_status return study_status_.to_dict() - def post(self, study_id: int): - data = request.json - study_status_ = Study.query.get(study_id) - study_status_ = StudyStatus.from_data(study_status_, data) - db.session.add(study_status_) + def put(self, study_id: int): + study = Study.query.get(study_id) + study.study_status.update(request.json) db.session.commit() - return study_status_.to_dict() + return study.study_status.to_dict() # @api.route("/study//metadata/status/") # class StudyStatusUpdate(Resource): diff --git a/model/study.py b/model/study.py index ae628c81..d1db171a 100644 --- a/model/study.py +++ b/model/study.py @@ -1,10 +1,12 @@ import uuid from datetime import datetime -from flask import jsonify +import datetime import model - from .db import db +from datetime import timezone + + class Study(db.Model): @@ -12,15 +14,16 @@ class Study(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - self.created_at = datetime.now() + self.created_at = datetime.datetime.now(timezone.utc).timestamp() - __tablename__ = "study" + self.study_status = model.StudyStatus(self) + __tablename__ = "study" id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) image = db.Column(db.String, nullable=False) - created_at = db.Column(db.DateTime, nullable=False) - updated_on = db.Column(db.DateTime, nullable=False) + 
created_at = db.Column(db.BigInteger, nullable=False) + updated_on = db.Column(db.BigInteger, nullable=False) dataset = db.relationship("Dataset", back_populates="study") study_contributors = db.relationship("StudyContributor", back_populates="study") @@ -64,7 +67,7 @@ def to_dict(self): "id": self.id, "title": self.title, "image": self.image, - "created_at": str(self.created_at), + "created_at": self.created_at, "updated_on": str(self.updated_on), # "study_contributors": self.study_contributors.to_dict(), "size": self.study_other.size if self.study_other else None, @@ -85,7 +88,7 @@ def update(self, data): """Updates the study from a dictionary""" self.title = data["title"] self.image = data["image"] - self.updated_on = datetime.now() + self.updated_on = datetime.datetime.now(timezone.utc).timestamp() def validate(self): """Validates the study""" From 9147cb5613dc07d578fb115254657ab2f94bf309 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 4 Sep 2023 09:47:14 -0700 Subject: [PATCH 095/505] fix: changed Identification endpoint structure, POST and PUT endpoints --- apis/study_metadata/study_identification.py | 26 +++++++++++++------- model/__init__.py | 3 +++ model/study_metadata/identifiers.py | 14 +++++++++++ model/study_metadata/study_identification.py | 5 ---- 4 files changed, 34 insertions(+), 14 deletions(-) create mode 100644 model/study_metadata/identifiers.py diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 485babbd..e21a4e0b 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,5 +1,5 @@ from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyIdentification +from model import Study, db, StudyIdentification, Identifiers from flask import request @@ -25,32 +25,40 @@ class StudyIdentificationResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study 
identifier") - @api.marshal_with(study_identification) + # @api.marshal_with(study_identification) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_identification_ = study_.study_identification - return [s.to_dict() for s in study_identification_] + identifiers = Identifiers(study_) + return identifiers.to_dict() def post(self, study_id: int): data = request.json study_obj = Study.query.get(study_id) - list_of_elements = [] - for i in data: + primary = data["primary"] + if "id" in primary and primary["id"]: + study_identification_ = StudyIdentification.query.get(primary["id"]) + study_identification_.update(primary) + elif "id" not in primary or not primary["id"]: + study_identification_ = StudyIdentification.from_data(study_obj, primary) + db.session.add(study_identification_) + + for i in data["secondary"]: if "id" in i and i["id"]: study_identification_ = StudyIdentification.query.get(i["id"]) study_identification_.update(i) - list_of_elements.append(study_identification_.to_dict()) elif "id" not in i or not i["id"]: study_identification_ = StudyIdentification.from_data(study_obj, i) db.session.add(study_identification_) - list_of_elements.append(study_identification_.to_dict()) db.session.commit() - return list_of_elements + identifiers = Identifiers(study_obj) + return identifiers.to_dict() @api.route("/study//metadata/identification/") class StudyIdentificationdUpdate(Resource): def delete(self, study_id: int, identification_id: int): study_identification_ = StudyIdentification.query.get(identification_id) + if not study_identification_.secondary: + return 400, "primary identifier can not be deleted" db.session.delete(study_identification_) db.session.commit() return 204 diff --git a/model/__init__.py b/model/__init__.py index fb161460..6892cfd9 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,5 +1,6 @@ from .version import Version from .dataset_versions import DatasetVersions + from .db import db from .participant import 
Participant from .study import Study @@ -54,6 +55,7 @@ from .study_metadata.study_reference import StudyReference from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from .study_metadata.study_status import StudyStatus +from .study_metadata.identifiers import Identifiers __all__ = [ @@ -104,4 +106,5 @@ "StudyReference", "StudySponsorsCollaborators", "StudyStatus", + "Identifiers" ] diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py new file mode 100644 index 00000000..52756a08 --- /dev/null +++ b/model/study_metadata/identifiers.py @@ -0,0 +1,14 @@ +from model import Study + + +class Identifiers: + def __init__(self, study: Study): + self.study = study + + study: Study + def to_dict(self): + return { + "primary": [identifier for identifier in self.study.study_identification if not identifier.secondary][0].to_dict(), + "secondary": [identifier.to_dict() for identifier in self.study.study_identification if identifier.secondary], + } + diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 92366834..89ef7b00 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -3,14 +3,11 @@ class StudyIdentification(db.Model): - """A study is a collection of datasets and participants""" - def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study __tablename__ = "study_identification" - id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) identifier_type = db.Column(db.String, nullable=False) @@ -22,14 +19,12 @@ def __init__(self, study): study = db.relationship("Study", back_populates="study_identification") def to_dict(self): - """Converts the study to a dictionary""" return { "id": self.id, "identifier": self.identifier, "identifier_type": self.identifier_type, "identifier_domain": self.identifier_domain, "identifier_link": 
self.identifier_link, - "secondary": self.secondary, } @staticmethod From 30a9f9f80a9f94b441658f1d3233012a0a1ab396 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 4 Sep 2023 09:52:41 -0700 Subject: [PATCH 096/505] fix: central-contact GET return only if central_contact attribute is true. --- apis/study_metadata/study_contact.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index f185f3f7..539c837b 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -27,7 +27,7 @@ class StudyContactResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_contact_ = study_.study_contact - return [s.to_dict() for s in study_contact_] + return [s.to_dict() for s in study_contact_ if s.central_contact] def post(self, study_id: int): data = request.json From c0a0c205cec19fadff38705c21f3f3f8b5755787 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 5 Sep 2023 09:54:57 -0700 Subject: [PATCH 097/505] fix: study other type --- apis/study_metadata/study_other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index db8333e8..40b0acdb 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -9,7 +9,7 @@ "StudyOther", { "id": fields.String(required=True), - "oversight_has_dmc": fields.String(required=True), + "oversight_has_dmc": fields.Boolean(required=True), "conditions": fields.String(required=True), "keywords": fields.String(required=True), "size": fields.String(required=True), From ec916d7d273427b8ac63060b77715734fe35c58c Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 5 Sep 2023 16:55:45 +0000 Subject: [PATCH 098/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_other.py | 5 ++--- .../study_metadata/study_sponsors_collaborators.py | 10 +++------- model/__init__.py | 2 +- model/study.py | 2 -- model/study_metadata/identifiers.py | 14 +++++++++++--- .../study_metadata/study_sponsors_collaborators.py | 2 -- 6 files changed, 17 insertions(+), 18 deletions(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 40b0acdb..de184efb 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -46,14 +46,13 @@ def post(self, study_id: int): # return study_other_.to_dict() - @api.route("/study//metadata/oversight") class StudyOversightResource(Resource): @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The study identifier") - # @api.marshal_with(study_other) + # @api.marshal_with(study_other) def get(self, study_id: int): study_ = Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc @@ -74,7 +73,7 @@ class StudyOversightResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.param("id", "The study identifier") - # @api.marshal_with(study_other) + # @api.marshal_with(study_other) def get(self, study_id: int): study_ = Study.query.get(study_id) study_other_conditions = study_.study_other.conditions diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 42634707..1c8d498a 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -26,6 +26,7 @@ }, ) + @api.route("/study//metadata/sponsors") class StudySponsorsResource(Resource): @api.doc("sponsors") @@ -38,10 +39,7 @@ def get(self, study_id: int): study_sponsors_collaborators_ = study_.study_sponsors_collaborators return study_sponsors_collaborators_.to_dict() - - @api.route( - 
"/study//metadata/sponsors_collaborators/" - ) + @api.route("/study//metadata/sponsors_collaborators/") class StudySponsorsUpdate(Resource): def put(self, study_id: int, sponsors_id: int): study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( @@ -64,9 +62,7 @@ def get(self, study_id: int): study_collaborators_ = study_.study_sponsors_collaborators.collaborator_name return {"collaborator_name": study_collaborators_} - @api.route( - "/study//metadata/collaborators/" - ) + @api.route("/study//metadata/collaborators/") class StudyCollaboratorsUpdate(Resource): def put(self, study_id: int, collaborators_id: int): study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( diff --git a/model/__init__.py b/model/__init__.py index 6892cfd9..f380a1dc 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -106,5 +106,5 @@ "StudyReference", "StudySponsorsCollaborators", "StudyStatus", - "Identifiers" + "Identifiers", ] diff --git a/model/study.py b/model/study.py index d1db171a..a5f3f348 100644 --- a/model/study.py +++ b/model/study.py @@ -7,8 +7,6 @@ from datetime import timezone - - class Study(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 52756a08..5ae8fbf6 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -6,9 +6,17 @@ def __init__(self, study: Study): self.study = study study: Study + def to_dict(self): return { - "primary": [identifier for identifier in self.study.study_identification if not identifier.secondary][0].to_dict(), - "secondary": [identifier.to_dict() for identifier in self.study.study_identification if identifier.secondary], + "primary": [ + identifier + for identifier in self.study.study_identification + if not identifier.secondary + ][0].to_dict(), + "secondary": [ + identifier.to_dict() + for identifier in self.study.study_identification + if identifier.secondary + ], 
} - diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 52aae4e0..62a52f53 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -33,7 +33,6 @@ def to_dict(self): "responsible_party_investigator_title": self.responsible_party_investigator_title, "responsible_party_investigator_affiliation": self.responsible_party_investigator_affiliation, "lead_sponsor_name": self.lead_sponsor_name, - } @staticmethod @@ -60,7 +59,6 @@ def update(self, data): self.lead_sponsor_name = data["lead_sponsor_name"] self.collaborator_name = data["collaborator_name"] - @staticmethod def from_data_(study, data: dict): """Creates a new study from a dictionary""" From 0d770207d3737165a14c232ab41e4e83562fcf8f Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 5 Sep 2023 11:17:41 -0700 Subject: [PATCH 099/505] fix: arm GET return specific format from STUDY_DESIGN table --- apis/study_metadata/study_arm.py | 37 ++++++++++++++++++------------- model/__init__.py | 4 +++- model/study_metadata/arm.py | 16 +++++++++++++ model/study_metadata/study_arm.py | 6 +++-- 4 files changed, 44 insertions(+), 19 deletions(-) create mode 100644 model/study_metadata/arm.py diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 06ed3302..ccf42b3e 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,20 +1,27 @@ from flask_restx import Resource, fields -from model import Study, db, StudyArm +from model import Study, db, StudyArm, Arm from flask import request from apis.study_metadata_namespace import api - -study_arm = api.model( - "StudyArm", - { +arm_object=api.model( + "ArmObject", { "id": fields.String(required=True), "label": fields.String(required=True), "type": fields.String(required=True), "description": fields.String(required=True), "intervention_list": fields.List(fields.String, required=True), 
- }, + } +) + +study_arm = api.model( + "StudyArm", + { + "arm": fields.Nested(arm_object, required=True), + "study_type": fields.String(required=True) + + } ) @@ -22,28 +29,26 @@ class StudyArmResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(study_arm) - def get(self, study_id: int): + # @api.marshal_with(study_arm) + def get(self, study_id): study_ = Study.query.get(study_id) - study_arm_ = study_.study_arm - return [s.to_dict() for s in study_arm_] + arm = Arm(study_) + + return arm.to_dict() - def post(self, study_id: int): + def post(self, study_id): data = request.json study_obj = Study.query.get(study_id) - list_of_elements = [] for i in data: if "id" in i and i["id"]: study_arm_ = StudyArm.query.get(i["id"]) study_arm_.update(i) - list_of_elements.append(study_arm_.to_dict()) elif "id" not in i or not i["id"]: study_arm_ = StudyArm.from_data(study_obj, i) db.session.add(study_arm_) - list_of_elements.append(study_arm_.to_dict()) db.session.commit() - - return list_of_elements + arms = Arm(study_obj) + return arms.to_dict() # todo delete @api.route("/study//metadata/arm/") diff --git a/model/__init__.py b/model/__init__.py index 6892cfd9..fb79bcd9 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -56,6 +56,7 @@ from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from .study_metadata.study_status import StudyStatus from .study_metadata.identifiers import Identifiers +from .study_metadata.arm import Arm __all__ = [ @@ -106,5 +107,6 @@ "StudyReference", "StudySponsorsCollaborators", "StudyStatus", - "Identifiers" + "Identifiers", + "Arm" ] diff --git a/model/study_metadata/arm.py b/model/study_metadata/arm.py new file mode 100644 index 00000000..051eecb8 --- /dev/null +++ b/model/study_metadata/arm.py @@ -0,0 +1,16 @@ +from model import Study + + +class Arm: + def __init__(self, study: Study): + self.study = study + + study: Study + def to_dict(self): + return { 
+ "arms": [arm.to_dict() for arm in self.study.study_arm], + "study_type": self.study.study_design.study_type, + } + + + diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 218e2693..2b3dea3e 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -30,8 +30,10 @@ def to_dict(self): "label": self.label, "type": self.type, "description": str(self.description), - "intervention_list": self.intervention_list, - } + "intervention_list": self.intervention_list + }, + + @staticmethod def from_data(study, data): From 3fee0f1d805da3cb54671c4f7ddd9cbdcc22a9ff Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 5 Sep 2023 18:18:54 +0000 Subject: [PATCH 100/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_arm.py | 12 ++++++------ model/__init__.py | 2 +- model/study_metadata/arm.py | 4 +--- model/study_metadata/study_arm.py | 18 +++++++++--------- 4 files changed, 17 insertions(+), 19 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index ccf42b3e..d753206e 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -5,23 +5,23 @@ from apis.study_metadata_namespace import api -arm_object=api.model( - "ArmObject", { +arm_object = api.model( + "ArmObject", + { "id": fields.String(required=True), "label": fields.String(required=True), "type": fields.String(required=True), "description": fields.String(required=True), "intervention_list": fields.List(fields.String, required=True), - } + }, ) study_arm = api.model( "StudyArm", { "arm": fields.Nested(arm_object, required=True), - "study_type": fields.String(required=True) - - } + "study_type": fields.String(required=True), + }, ) diff --git a/model/__init__.py b/model/__init__.py index fb79bcd9..041cfb11 100644 --- 
a/model/__init__.py +++ b/model/__init__.py @@ -108,5 +108,5 @@ "StudySponsorsCollaborators", "StudyStatus", "Identifiers", - "Arm" + "Arm", ] diff --git a/model/study_metadata/arm.py b/model/study_metadata/arm.py index 051eecb8..ef84cb8e 100644 --- a/model/study_metadata/arm.py +++ b/model/study_metadata/arm.py @@ -6,11 +6,9 @@ def __init__(self, study: Study): self.study = study study: Study + def to_dict(self): return { "arms": [arm.to_dict() for arm in self.study.study_arm], "study_type": self.study.study_design.study_type, } - - - diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 2b3dea3e..12f0acfa 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -25,15 +25,15 @@ def __init__(self, study): def to_dict(self): """Converts the study to a dictionary""" - return { - "id": self.id, - "label": self.label, - "type": self.type, - "description": str(self.description), - "intervention_list": self.intervention_list - }, - - + return ( + { + "id": self.id, + "label": self.label, + "type": self.type, + "description": str(self.description), + "intervention_list": self.intervention_list, + }, + ) @staticmethod def from_data(study, data): From d9ef30f3e41cb64b988d5ad93afc2262a7e569f2 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 5 Sep 2023 15:45:50 -0700 Subject: [PATCH 101/505] fix: POST functions updated to PUT in 1 to 1 relations --- apis/study_metadata/study_description.py | 18 ++------- apis/study_metadata/study_design.py | 24 ++++++------ apis/study_metadata/study_eligibility.py | 31 +++++++-------- apis/study_metadata/study_ipdsharing.py | 31 +++++++-------- apis/study_metadata/study_other.py | 32 +++++---------- .../study_sponsors_collaborators.py | 39 ++++++++++--------- 6 files changed, 74 insertions(+), 101 deletions(-) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index e6b1a249..cfbce6d7 100644 --- 
a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -28,18 +28,8 @@ def get(self, study_id: int): study_description_ = study_.study_description return study_description_.to_dict() - def post(self, study_id: int): - data = request.json - study_description_ = Study.query.get(study_id) - study_description_ = StudyDescription.from_data(study_description_, data) - db.session.add(study_description_) + def put(self, study_id: int): + study_ = Study.query.get(study_id) + study_.study_description.update(request.json) db.session.commit() - return study_description_.to_dict() - - # @api.route("/study//metadata/description/") - # class StudyDescriptionUpdate(Resource): - # def put(self, study_id: int, description_id: int): - # study_description_ = StudyDescription.query.get(description_id) - # study_description_.update(request.json) - # db.session.commit() - # return study_description_.to_dict() + return study_.study_description.to_dict() diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 74a4f134..23a2fdd2 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -42,18 +42,16 @@ def get(self, study_id: int): study_design_ = study_.study_design return study_design_.to_dict() - def post(self, study_id: int): - data = request.json - study_design_ = Study.query.get(study_id) - study_design_ = StudyDesign.from_data(study_design_, data) - db.session.add(study_design_) + def put(self, study_id: int): + study_ = Study.query.get(study_id) + study_.study_design.update(request.json) db.session.commit() - return study_design_.to_dict() + return study_.study_design.to_dict() - # @api.route("/study//metadata/design/") - # class StudyDesignUpdate(Resource): - # def put(self, study_id: int, design_id: int): - # study_design_ = StudyDesign.query.get(design_id) - # study_design_.update(request.json) - # db.session.commit() - # return study_design_.to_dict() + # def post(self, 
study_id: int): + # data = request.json + # study_design_ = Study.query.get(study_id) + # study_design_ = StudyDesign.from_data(study_design_, data) + # db.session.add(study_design_) + # db.session.commit() + # return study_design_.to_dict() diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 2d678ac0..d5fbc29b 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -36,21 +36,18 @@ class StudyEligibilityResource(Resource): @api.marshal_with(study_eligibility) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_eligibility_ = study_.study_eligibility - return study_eligibility_.to_dict() - - def post(self, study_id: int): - data = request.json - study_eligibility_ = Study.query.get(study_id) - study_eligibility_ = StudyEligibility.from_data(study_eligibility_, data) - db.session.add(study_eligibility_) + return study_.study_eligibility.to_dict() + + def put(self, study_id: int): + study_ = Study.query.get(study_id) + study_.study_eligibility.update(request.json) db.session.commit() - return study_eligibility_.to_dict() - - # @api.route("/study//metadata/eligibility/") - # class StudyArmUpdate(Resource): - # def put(self, study_id: int, eligibility_id: int): - # study_eligibility_ = StudyEligibility.query.get(eligibility_id) - # study_eligibility_.update(request.json) - # db.session.commit() - # return study_eligibility_.to_dict() + return study_.study_eligibility.to_dict() + + # def post(self, study_id: int): + # data = request.json + # study_eligibility_ = Study.query.get(study_id) + # study_eligibility_ = StudyEligibility.from_data(study_eligibility_, data) + # db.session.add(study_eligibility_) + # db.session.commit() + # return study_eligibility_.to_dict() diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 6cc86be9..579a975f 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ 
b/apis/study_metadata/study_ipdsharing.py @@ -29,21 +29,18 @@ class StudyIpdsharingResource(Resource): @api.marshal_with(study_ipdsharing) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_ipdsharing_ = study_.study_ipdsharing - return study_ipdsharing_.to_dict() - - def post(self, study_id: int): - data = request.json - study_ipdsharing_ = Study.query.get(study_id) - study_ipdsharing_ = StudyIpdsharing.from_data(study_ipdsharing_, data) - db.session.add(study_ipdsharing_) + return study_.study_ipdsharing.to_dict() + + def put(self, study_id: int): + study_ = Study.query.get(study_id) + study_.study_ipdsharing.update(request.json) db.session.commit() - return study_ipdsharing_.to_dict() - - # @api.route("/study//metadata/ipdsharing/") - # class StudyIpdsharingUpdate(Resource): - # def put(self, study_id: int, study_ipdsharing_id: int): - # study_ipdsharing_ = StudyIpdsharing.query.get(study_ipdsharing_id) - # study_ipdsharing_.update(request.json) - # db.session.commit() - # return study_ipdsharing_.to_dict() + return study_.study_ipdsharing.to_dict() + + # def post(self, study_id: int): + # data = request.json + # study_ipdsharing_ = Study.query.get(study_id) + # study_ipdsharing_ = StudyIpdsharing.from_data(study_ipdsharing_, data) + # db.session.add(study_ipdsharing_) + # db.session.commit() + # return study_ipdsharing_.to_dict() diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index de184efb..3b0742f6 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -29,21 +29,11 @@ def get(self, study_id: int): study_other_ = study_.study_other return study_other_.to_dict() - def post(self, study_id: int): - data = request.json - study_other_ = Study.query.get(study_id) - study_other_ = StudyOther.from_data(study_other_, data) - db.session.add(study_other_) + def put(self, study_id: int): + study_ = Study.query.get(study_id) + study_.study_other.update(request.json) 
db.session.commit() - return study_other_.to_dict() - - # @api.route("/study//metadata/other/") - # class StudyOtherUpdate(Resource): - # def put(self, study_id: int, other_id: int): - # study_other_ = StudyOther.query.get(other_id) - # study_other_.update(request.json) - # db.session.commit() - # return study_other_.to_dict() + return study_.study_other.to_dict() @api.route("/study//metadata/oversight") @@ -60,11 +50,10 @@ def get(self, study_id: int): def put(self, study_id: int): data = request.json - study_other_ = Study.query.get(study_id) - study_other_ = StudyOther.from_data(study_other_, data) - db.session.add(study_other_) + study_ = Study.query.get(study_id) + study_oversight = study_.study_other.oversight_has_dmc = data["oversight_has_dmc"] db.session.commit() - return study_other_.to_dict() + return study_oversight @api.route("/study//metadata/conditions") @@ -81,8 +70,7 @@ def get(self, study_id: int): def put(self, study_id: int): data = request.json - study_other_ = Study.query.get(study_id) - study_other_ = StudyOther.from_data(study_other_, data) - db.session.add(study_other_) + study_ = Study.query.get(study_id) + study_.study_other.conditions = data["conditions"] db.session.commit() - return study_other_.to_dict() + return study_.study_other.conditions diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 1c8d498a..0162b71c 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -39,15 +39,11 @@ def get(self, study_id: int): study_sponsors_collaborators_ = study_.study_sponsors_collaborators return study_sponsors_collaborators_.to_dict() - @api.route("/study//metadata/sponsors_collaborators/") - class StudySponsorsUpdate(Resource): - def put(self, study_id: int, sponsors_id: int): - study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( - sponsors_id - ) - 
study_sponsors_collaborators_.update(request.json) - db.session.commit() - return study_sponsors_collaborators_.to_dict() + def put(self, study_id: int): + study_ = Study.query.get(study_id) + study_.study_sponsors_collaborators.update(request.json) + db.session.commit() + return study_.study_sponsors_collaborators.to_dict() @api.route("/study//metadata/collaborators") @@ -62,12 +58,19 @@ def get(self, study_id: int): study_collaborators_ = study_.study_sponsors_collaborators.collaborator_name return {"collaborator_name": study_collaborators_} - @api.route("/study//metadata/collaborators/") - class StudyCollaboratorsUpdate(Resource): - def put(self, study_id: int, collaborators_id: int): - study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( - collaborators_id - ) - study_sponsors_collaborators_.update(request.json) - db.session.commit() - return study_sponsors_collaborators_.to_dict() + def put(self, study_id: int): + data = request.json + study_ = Study.query.get(study_id) + study_.study_sponsors_collaborators.collaborator_name = data["collaborator_name"] + db.session.commit() + return study_.study_sponsors_collaborators.collaborator_name + + # @api.route("/study//metadata/collaborators/") + # class StudyCollaboratorsUpdate(Resource): + # def put(self, study_id: int, collaborators_id: int): + # study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( + # collaborators_id + # ) + # study_sponsors_collaborators_.update(request.json) + # db.session.commit() + # return study_sponsors_collaborators_.to_dict() From 58aa63c87fd6e0de849ddde3be666da96e2d40c3 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 5 Sep 2023 15:52:54 -0700 Subject: [PATCH 102/505] feat: study metadata (1-to-1) created when new study initiated --- model/study.py | 11 +++++++++-- model/study_metadata/study_description.py | 2 ++ model/study_metadata/study_design.py | 19 +++++++++++++++++++ model/study_metadata/study_eligibility.py | 13 ++++++++++++- 
model/study_metadata/study_ipdsharing.py | 7 ++++++- model/study_metadata/study_other.py | 5 ++++- .../study_sponsors_collaborators.py | 8 ++++++-- model/study_metadata/study_status.py | 8 ++++++-- 8 files changed, 64 insertions(+), 9 deletions(-) diff --git a/model/study.py b/model/study.py index a5f3f348..5d53a92f 100644 --- a/model/study.py +++ b/model/study.py @@ -13,8 +13,15 @@ class Study(db.Model): def __init__(self): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() + # + # self.study_status = model.StudyStatus(self) + self.study_sponsors_collaborators = model.StudySponsorsCollaborators(self) + self.study_design = model.StudyDesign(self) + self.study_eligibility = model.StudyEligibility(self) + self.study_ipdsharing = model.StudyIpdsharing(self) + self.study_description = model.StudyDescription(self) - self.study_status = model.StudyStatus(self) + self.study_other = model.StudyOther(self) __tablename__ = "study" id = db.Column(db.CHAR(36), primary_key=True) @@ -66,7 +73,7 @@ def to_dict(self): "title": self.title, "image": self.image, "created_at": self.created_at, - "updated_on": str(self.updated_on), + "updated_on": self.updated_on, # "study_contributors": self.study_contributors.to_dict(), "size": self.study_other.size if self.study_other else None, "description": self.study_description.brief_summary diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index 3efc10fc..e6ecfcca 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -8,6 +8,8 @@ class StudyDescription(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.brief_summary = '' + self.detailed_description = '' __tablename__ = "study_description" diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 83f3c335..ab5b5e1a 100644 --- a/model/study_metadata/study_design.py +++ 
b/model/study_metadata/study_design.py @@ -12,6 +12,25 @@ def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.design_allocation = "" + self.study_type = "" + self.design_intervention_model = "" + self.design_intervention_model_description = "" + self.design_primary_purpose = "" + self.design_masking = "" + self.design_masking_description = "" + self.design_who_masked_list = [] + self.phase_list = [] + self.enrollment_count = 0 + self.enrollment_type = "" + self.number_arms = 0 + self.design_observational_model_list = [] + self.design_time_perspective_list = [] + self.bio_spec_retention = "" + self.bio_spec_description = "" + self.target_duration = "" + self.number_groups_cohorts = 0 + __tablename__ = "study_design" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 8017f967..9c520a7f 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -10,7 +10,18 @@ class StudyEligibility(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study - + self.gender = "" + self.gender_based = "" + self.gender_description = "" + self.minimum_age_value = 18 + self.maximum_age_value = 60 + self.minimum_age_unit = "" + self.maximum_age_unit = "" + self.healthy_volunteers = "" + self.inclusion_criteria = [] + self.exclusion_criteria = [] + self.study_population = "" + self.sampling_method = "" __tablename__ = "study_eligibility" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index fccd1622..256f36ac 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -10,7 +10,12 @@ class StudyIpdsharing(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study - + self.ipd_sharing = "" + self.ipd_sharing_description = "" + 
self.ipd_sharing_info_type_list =[] + self.ipd_sharing_time_frame = "" + self.ipd_sharing_access_criteria = "" + self.ipd_sharing_url = "" __tablename__ = "study_ipdsharing" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index fee797a8..080b5fa2 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -11,7 +11,10 @@ class StudyOther(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study - + self.oversight_has_dmc = False + self.conditions = [] + self.keywords = [] + self.size = "" __tablename__ = "study_other" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 62a52f53..cb12902a 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -10,7 +10,12 @@ class StudySponsorsCollaborators(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study - + self.responsible_party_type = "" + self.responsible_party_investigator_name = "" + self.responsible_party_investigator_title = "" + self.responsible_party_investigator_affiliation = "" + self.lead_sponsor_name = "" + self.collaborator_name = [] __tablename__ = "study_sponsors_collaborators" id = db.Column(db.CHAR(36), primary_key=True) @@ -57,7 +62,6 @@ def update(self, data): "responsible_party_investigator_affiliation" ] self.lead_sponsor_name = data["lead_sponsor_name"] - self.collaborator_name = data["collaborator_name"] @staticmethod def from_data_(study, data: dict): diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index fb83da35..7990a7ba 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -8,9 +8,13 @@ class StudyStatus(db.Model): def __init__(self, study): self.id = 
str(uuid.uuid4()) - # self.created_at = datetime.now() self.study = study - + self.overall_status = "" + self.why_stopped = "" + self.start_date = "" + self.start_date_type = "" + self.completion_date = "" + self.completion_date_type = "" __tablename__ = "study_status" id = db.Column(db.CHAR(36), primary_key=True) From 8e3487b8597410e302bdfe6d97808de7f616a74c Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 6 Sep 2023 14:15:26 -0700 Subject: [PATCH 103/505] fix: datetimes changed to timezones --- model/dataset.py | 14 +++++++------- model/dataset_contributor.py | 1 - model/dataset_versions.py | 1 - model/invited_study_contributor.py | 8 ++++---- model/participant.py | 18 ++++++++---------- model/user.py | 8 ++++---- model/version.py | 18 ++++++++++-------- 7 files changed, 33 insertions(+), 35 deletions(-) diff --git a/model/dataset.py b/model/dataset.py index 767c880a..979d21ee 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -1,7 +1,7 @@ import uuid - -from datetime import datetime +from datetime import timezone from sqlalchemy.sql.expression import true +import datetime import model @@ -12,12 +12,12 @@ class Dataset(db.Model): def __init__(self, study): self.study = study self.id = str(uuid.uuid4()) - self.created_at = datetime.now() + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset" id = db.Column(db.CHAR(36), primary_key=True) - updated_on = db.Column(db.DateTime, nullable=False) - created_at = db.Column(db.DateTime, nullable=False) + updated_on = db.Column(db.BigInteger, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="dataset") @@ -62,7 +62,7 @@ def to_dict(self): return { "id": self.id, - "created_at": str(self.created_at), + "created_at": self.created_at, # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published 
else None, } @@ -85,5 +85,5 @@ def from_data(study, data: dict): def update(self, data: dict): """Creates a new dataset from a dictionary""" - self.updated_on = datetime.now() + self.updated_on = datetime.datetime.now(timezone.utc).timestamp() # self.dataset_versions = data["dataset_versions"] diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py index 45e70242..a19d52a0 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -1,5 +1,4 @@ import uuid - from .db import db diff --git a/model/dataset_versions.py b/model/dataset_versions.py index 6ee5e780..9c01bf50 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -1,6 +1,5 @@ import model - class DatasetVersions: def __init__( self, diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index b79d76da..a106ebbf 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,7 +1,7 @@ import uuid from datetime import datetime from .db import db - +import datetime class StudyInvitedContributor(db.Model): def __init__(self): @@ -10,7 +10,7 @@ def __init__(self): __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) - invited_on = db.Column(db.DateTime, nullable=False) + invited_on = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) study = db.relationship("Study", back_populates="invited_contributors") @@ -19,7 +19,7 @@ def to_dict(self): return { "email_address": self.id, "permission": self.permission, - "invited_on": datetime.now(), + "invited_on": self.invited_on, } @staticmethod @@ -27,6 +27,6 @@ def from_data(data: dict): invited_contributor = StudyInvitedContributor() invited_contributor.email_address = data["email_address"] invited_contributor.permission = data["permission"] - invited_contributor.invited_on = 
data["invited_on"] + invited_contributor.invited_on = datetime.datetime.now(timezone.utc).timestamp() return invited_contributor diff --git a/model/participant.py b/model/participant.py index f10f799f..355f9950 100644 --- a/model/participant.py +++ b/model/participant.py @@ -1,15 +1,14 @@ import uuid - import model -from datetime import datetime +from datetime import timezone from .db import db - +import datetime class Participant(db.Model): def __init__(self, study): self.study = study self.id = str(uuid.uuid4()) - self.created_at = datetime.now() + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "participant" id = db.Column(db.CHAR(36), primary_key=True) @@ -17,8 +16,8 @@ def __init__(self, study): last_name = db.Column(db.String, nullable=False) address = db.Column(db.String, nullable=False) age = db.Column(db.String, nullable=False) - created_at = db.Column(db.DateTime, nullable=False) - updated_on = db.Column(db.DateTime, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + updated_on = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="participants") @@ -35,8 +34,8 @@ def to_dict(self): "last_name": self.last_name, "address": self.address, "age": self.age, - "created_at": str(self.created_at), - "updated_on": str(self.updated_on), + "created_at": self.created_at, + "updated_on": self.updated_on, } @staticmethod @@ -46,9 +45,8 @@ def from_data(data: dict, study): return participant def update(self, data): - # self.id = data["id"] self.first_name = data["first_name"] self.last_name = data["last_name"] self.address = data["address"] self.age = data["age"] - self.updated_on = datetime.now() + self.updated_on = datetime.datetime.now(timezone.utc).timestamp() diff --git a/model/user.py b/model/user.py index 726be699..f9a6ac1c 100644 --- a/model/user.py +++ b/model/user.py @@ -1,7 +1,8 @@ import uuid from datetime 
import datetime from .db import db - +import datetime +from datetime import timezone class User(db.Model): def __init__(self): @@ -15,7 +16,7 @@ def __init__(self): last_name = db.Column(db.String, nullable=False) orcid = db.Column(db.String, nullable=False) hash = db.Column(db.String, nullable=False) - created_at = db.Column(db.DateTime, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) institution = db.Column(db.String, nullable=False) study_contributors = db.relationship("StudyContributor", back_populates="user") @@ -28,7 +29,7 @@ def to_dict(self): "last_name": self.last_name, "orcid": self.orcid, "hash": self.hash, - "created_at": str(datetime.now()), + "created_at": self.created_at, "institution": self.institution, } @@ -41,6 +42,5 @@ def from_data(data: dict): user.last_name = data["last_name"] user.orcid = data["orcid"] user.hash = data["hash"] - user.created_at = data["created_at"] user.institution = data["institution"] return user diff --git a/model/version.py b/model/version.py index 54d8fea8..b7240706 100644 --- a/model/version.py +++ b/model/version.py @@ -1,7 +1,7 @@ import uuid -from datetime import datetime +from datetime import timezone +import datetime from model.dataset import Dataset - from .db import db version_participants = db.Table( @@ -23,10 +23,10 @@ def __init__(self, dataset): title = db.Column(db.String, nullable=False) published = db.Column(db.BOOLEAN, nullable=False) changelog = db.Column(db.String, nullable=False) - updated_on = db.Column(db.DateTime, nullable=False) + updated_on = db.Column(db.BigInteger, nullable=False) doi = db.Column(db.String, nullable=False) - created_at = db.Column(db.DateTime, nullable=False) - published_on = db.Column(db.DateTime, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + published_on = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) dataset = db.relationship("Dataset", 
back_populates="dataset_versions") @@ -37,8 +37,9 @@ def to_dict(self): "id": self.id, "title": self.title, "changelog": self.changelog, - "published_on": str(datetime.now()), - "created_at": str(datetime.now()), + "published_on": self.published_on, + "updated_on": self.updated_on, + "created_at": self.created_at, "doi": self.doi, "published": self.published, "participants": [p.id for p in self.participants], @@ -54,6 +55,7 @@ def update(self, data): self.title = data["title"] self.published = data["published"] self.doi = data["doi"] - self.published_on = data["published_on"] + self.published_on = datetime.datetime.now(timezone.utc).timestamp() + self.updated_on = datetime.datetime.now(timezone.utc).timestamp() self.participants[:] = data["participants"] self.changelog = data["changelog"] From 9f51559790490dede80ee2554c4aa300f7413fd0 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 6 Sep 2023 21:16:13 +0000 Subject: [PATCH 104/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_other.py | 4 +++- apis/study_metadata/study_sponsors_collaborators.py | 4 +++- model/dataset_versions.py | 1 + model/invited_study_contributor.py | 1 + model/participant.py | 1 + model/study_metadata/study_description.py | 4 ++-- model/study_metadata/study_eligibility.py | 1 + model/study_metadata/study_ipdsharing.py | 3 ++- model/study_metadata/study_other.py | 1 + model/study_metadata/study_sponsors_collaborators.py | 1 + model/study_metadata/study_status.py | 1 + model/user.py | 1 + 12 files changed, 18 insertions(+), 5 deletions(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 3b0742f6..3370f458 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -51,7 +51,9 @@ def get(self, study_id: int): def put(self, study_id: int): data = 
request.json study_ = Study.query.get(study_id) - study_oversight = study_.study_other.oversight_has_dmc = data["oversight_has_dmc"] + study_oversight = study_.study_other.oversight_has_dmc = data[ + "oversight_has_dmc" + ] db.session.commit() return study_oversight diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 0162b71c..ff0d9157 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -61,7 +61,9 @@ def get(self, study_id: int): def put(self, study_id: int): data = request.json study_ = Study.query.get(study_id) - study_.study_sponsors_collaborators.collaborator_name = data["collaborator_name"] + study_.study_sponsors_collaborators.collaborator_name = data[ + "collaborator_name" + ] db.session.commit() return study_.study_sponsors_collaborators.collaborator_name diff --git a/model/dataset_versions.py b/model/dataset_versions.py index 9c01bf50..6ee5e780 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -1,5 +1,6 @@ import model + class DatasetVersions: def __init__( self, diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index a106ebbf..f062f78a 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -3,6 +3,7 @@ from .db import db import datetime + class StudyInvitedContributor(db.Model): def __init__(self): self.id = str(uuid.uuid4()) diff --git a/model/participant.py b/model/participant.py index 355f9950..95de0e90 100644 --- a/model/participant.py +++ b/model/participant.py @@ -4,6 +4,7 @@ from .db import db import datetime + class Participant(db.Model): def __init__(self, study): self.study = study diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index e6ecfcca..14b7fa52 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py 
@@ -8,8 +8,8 @@ class StudyDescription(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study - self.brief_summary = '' - self.detailed_description = '' + self.brief_summary = "" + self.detailed_description = "" __tablename__ = "study_description" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 9c520a7f..c533387e 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -22,6 +22,7 @@ def __init__(self, study): self.exclusion_criteria = [] self.study_population = "" self.sampling_method = "" + __tablename__ = "study_eligibility" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 256f36ac..af9ba31f 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -12,10 +12,11 @@ def __init__(self, study): self.study = study self.ipd_sharing = "" self.ipd_sharing_description = "" - self.ipd_sharing_info_type_list =[] + self.ipd_sharing_info_type_list = [] self.ipd_sharing_time_frame = "" self.ipd_sharing_access_criteria = "" self.ipd_sharing_url = "" + __tablename__ = "study_ipdsharing" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 080b5fa2..3f14ff3f 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -15,6 +15,7 @@ def __init__(self, study): self.conditions = [] self.keywords = [] self.size = "" + __tablename__ = "study_other" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index cb12902a..3b527623 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -16,6 +16,7 @@ def 
__init__(self, study): self.responsible_party_investigator_affiliation = "" self.lead_sponsor_name = "" self.collaborator_name = [] + __tablename__ = "study_sponsors_collaborators" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 7990a7ba..cf536d87 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -15,6 +15,7 @@ def __init__(self, study): self.start_date_type = "" self.completion_date = "" self.completion_date_type = "" + __tablename__ = "study_status" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/user.py b/model/user.py index f9a6ac1c..80751dc0 100644 --- a/model/user.py +++ b/model/user.py @@ -4,6 +4,7 @@ import datetime from datetime import timezone + class User(db.Model): def __init__(self): self.id = str(uuid.uuid4()) From a8530350742cb626cf43bddfb817e980e5f03b21 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 6 Sep 2023 16:50:34 -0700 Subject: [PATCH 105/505] fix: updated SQL queries --- sql/all_tables.sql | 988 --------------------------------------------- 1 file changed, 988 deletions(-) delete mode 100644 sql/all_tables.sql diff --git a/sql/all_tables.sql b/sql/all_tables.sql deleted file mode 100644 index 1e3a33c4..00000000 --- a/sql/all_tables.sql +++ /dev/null @@ -1,988 +0,0 @@ --- -------------------------------------------------------- --- Host: 7hg.h.filess.io --- Server version: PostgreSQL 14.4 on x86_64-pc-linux-musl, compiled by gcc (Alpine 11.2.1_git20220219) 11.2.1 20220219, 64-bit --- Server OS: --- HeidiSQL Version: 12.3.0.6589 --- -------------------------------------------------------- - -/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; -/*!40101 SET NAMES */; -/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; -/*!40103 SET TIME_ZONE='+00:00' */; -/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; -/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, 
SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; -/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; - --- Dumping structure for table public.dataset -CREATE TABLE IF NOT EXISTS "dataset" ( - "id" CHAR(36) NOT NULL, - "updated_on" TIMESTAMP NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset: 4 rows -/*!40000 ALTER TABLE "dataset" DISABLE KEYS */; -INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000004'); -/*!40000 ALTER TABLE "dataset" ENABLE KEYS */; - --- Dumping structure for table public.dataset_access -CREATE TABLE IF NOT EXISTS "dataset_access" ( - "id" CHAR(36) NOT NULL, - "type" VARCHAR NOT NULL, - "description" VARCHAR NOT NULL, - "url" VARCHAR NOT NULL, - "url_last_checked" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_access_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_access: -1 rows -/*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; -INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'main', 'Clinical research studies ', 
'https://aireadi.org', '1st August', NULL), - ('badac1ab-26fd-4f94-b2b4-b198365a198f', 'none', '', '', '', NULL), - ('6d2c020f-71b1-48d2-8532-89a563868fa4', 'none', '', '', '', NULL), - ('f8f3bf91-2eb9-49b8-a8f0-1c92def99bcf', 'none', '', '', '', NULL), - ('395d37d9-e3cf-4989-81f6-21dd2202d1ca', 'none', '', '', '', NULL), - ('fdc10b6d-2dc6-41c1-b43e-202a24abc80a', 'none', '', '', '', NULL); -/*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; - --- Dumping structure for table public.dataset_alternate_identifier -CREATE TABLE IF NOT EXISTS "dataset_alternate_identifier" ( - "id" CHAR(36) NOT NULL, - "identifier" VARCHAR NOT NULL, - "identifier_type" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_identifier_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_alternate_identifier: 3 rows -/*!40000 ALTER TABLE "dataset_alternate_identifier" DISABLE KEYS */; -INSERT INTO "dataset_alternate_identifier" ("id", "identifier", "identifier_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', NULL), - ('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', NULL), - ('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', NULL); -/*!40000 ALTER TABLE "dataset_alternate_identifier" ENABLE KEYS */; - --- Dumping structure for table public.dataset_consent -CREATE TABLE IF NOT EXISTS "dataset_consent" ( - "id" CHAR(36) NOT NULL, - "type" VARCHAR NOT NULL, - "noncommercial" BOOLEAN NOT NULL, - "geog_restrict" BOOLEAN NOT NULL, - "research_type" BOOLEAN NOT NULL, - "genetic_only" BOOLEAN NOT NULL, - "no_methods" BOOLEAN NOT NULL, - "details" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_consent_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping 
data for table public.dataset_consent: -1 rows -/*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */; -INSERT INTO "dataset_consent" ("id", "type", "noncommercial", "geog_restrict", "research_type", "genetic_only", "no_methods", "details", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'none', 'true', 'true', 'true', 'false', 'false', 'na', NULL), - ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', NULL); -/*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; - --- Dumping structure for table public.dataset_contributor -CREATE TABLE IF NOT EXISTS "dataset_contributor" ( - "id" CHAR(36) NOT NULL, - "first_name" VARCHAR NOT NULL, - "last_name" VARCHAR NOT NULL, - "name_type" VARCHAR NOT NULL, - "name_identifier" VARCHAR NOT NULL, - "name_identifier_scheme" VARCHAR NOT NULL, - "name_identifier_scheme_uri" VARCHAR NOT NULL, - "creator" BOOLEAN NOT NULL, - "contributor_type" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_contributor_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_contributor: -1 rows -/*!40000 ALTER TABLE "dataset_contributor" DISABLE KEYS */; -INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', NULL); -/*!40000 ALTER TABLE "dataset_contributor" ENABLE KEYS */; - --- Dumping structure for table public.dataset_contributor_affiliation -CREATE TABLE IF NOT EXISTS "dataset_contributor_affiliation" ( - "id" CHAR(36) NOT NULL, - "identifier" VARCHAR NOT NULL, - "identifier_scheme" VARCHAR NOT NULL, - "identifier_scheme_uri" VARCHAR NOT NULL, - 
"dataset_contributor_id" VARCHAR NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_contributor_affiliation_dataset_contributor_id_fkey" FOREIGN KEY ("dataset_contributor_id") REFERENCES "dataset_contributor" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_contributor_affiliation: -1 rows -/*!40000 ALTER TABLE "dataset_contributor_affiliation" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_contributor_affiliation" ENABLE KEYS */; - --- Dumping structure for table public.dataset_date -CREATE TABLE IF NOT EXISTS "dataset_date" ( - "id" CHAR(36) NOT NULL, - "date" VARCHAR NOT NULL, - "date_type" VARCHAR NOT NULL, - "data_information" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_date_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_date: -1 rows -/*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; -INSERT INTO "dataset_date" ("id", "date", "date_type", "data_information", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000002', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000005'), - ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', NULL), - ('00000000-0000-0000-0000-000000000004', '2023', 'day', 'none', NULL), - ('0b1775e5-d110-482f-a1c4-2aa3947b8db8', '', 'na', 'none', NULL), - ('dc090dbd-6fa3-4b61-829e-2f139bdbd116', '', 'na', 'none', NULL); -/*!40000 ALTER TABLE "dataset_date" ENABLE KEYS */; - --- Dumping structure for table public.dataset_description -CREATE TABLE IF NOT EXISTS "dataset_description" ( - "id" CHAR(36) NOT NULL, - "description" VARCHAR NOT NULL, - "description_type" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT 
"dataset_description_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_description: -1 rows -/*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; -INSERT INTO "dataset_description" ("id", "description", "description_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000001', '', '', NULL), - ('78f2b774-2f5a-4096-b82e-9923ca04395b', '', '', NULL); -/*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; - --- Dumping structure for table public.dataset_de_ident_level -CREATE TABLE IF NOT EXISTS "dataset_de_ident_level" ( - "id" CHAR(36) NOT NULL, - "type" VARCHAR NOT NULL, - "direct" BOOLEAN NOT NULL, - "hipaa" BOOLEAN NOT NULL, - "dates" BOOLEAN NOT NULL, - "nonarr" BOOLEAN NOT NULL, - "k_anon" BOOLEAN NOT NULL, - "details" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_de_ident_level_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_de_ident_level: -1 rows -/*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; -INSERT INTO "dataset_de_ident_level" ("id", "type", "direct", "hipaa", "dates", "nonarr", "k_anon", "details", "dataset_id") 
VALUES - ('00000000-0000-0000-0000-000000000002', 'NA', 'false', 'true', 'false', 'true', 'false', 'none', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', NULL), - ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', NULL), - ('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', NULL); -/*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; - --- Dumping structure for table public.dataset_funder -CREATE TABLE IF NOT EXISTS "dataset_funder" ( - "id" CHAR(36) NOT NULL, - "name" VARCHAR NOT NULL, - "identifier" VARCHAR NOT NULL, - "identifier_type" VARCHAR NOT NULL, - "identifier_scheme_uri" VARCHAR NOT NULL, - "award_number" VARCHAR NOT NULL, - "award_uri" VARCHAR NOT NULL, - "award_title" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_funder_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_funder: -1 rows -/*!40000 ALTER TABLE "dataset_funder" DISABLE KEYS */; -INSERT INTO "dataset_funder" ("id", "name", "identifier", "identifier_type", "identifier_scheme_uri", "award_number", "award_uri", "award_title", "dataset_id") VALUES - ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', NULL); -/*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; - --- Dumping structure for table public.dataset_managing_organization -CREATE TABLE IF NOT EXISTS "dataset_managing_organization" ( - "id" CHAR(36) NOT NULL, - "name" VARCHAR NOT NULL, - "ror_id" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_managing_organization_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table 
public.dataset_managing_organization: -1 rows -/*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; -INSERT INTO "dataset_managing_organization" ("id", "name", "ror_id", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', NULL), - ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', '354grhji5', NULL); -/*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; - --- Dumping structure for table public.dataset_other -CREATE TABLE IF NOT EXISTS "dataset_other" ( - "id" CHAR(36) NOT NULL, - "language" VARCHAR NOT NULL, - "managing_organization_name" VARCHAR NOT NULL, - "managing_organization_ror_id" VARCHAR NOT NULL, - "size" UNKNOWN NOT NULL, - "standards_followed" VARCHAR NOT NULL, - "acknowledgement" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_other_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_other: -1 rows -/*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; -INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "managing_organization_ror_id", "size", "standards_followed", "acknowledgement", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000003', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', NULL), - ('00000000-0000-0000-0000-000000000002', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', NULL), - ('2fca4640-6f0e-406c-8c7a-e93a0740b9c6', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org', 'NA', NULL); -/*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; - --- Dumping structure for table public.dataset_readme -CREATE TABLE IF 
NOT EXISTS "dataset_readme" ( - "id" CHAR(36) NOT NULL, - "content" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_readme: -1 rows -/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; -INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES - ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', NULL); -/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; - --- Dumping structure for table public.dataset_record_keys -CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( - "id" CHAR(36) NOT NULL, - "key_type" VARCHAR NOT NULL, - "key_details" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_record_keys_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_record_keys: -1 rows -/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; -INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES - ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', NULL), - ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', NULL), - ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', NULL), - ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', NULL); -/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; - --- Dumping structure for table public.dataset_related_item -CREATE TABLE IF NOT EXISTS "dataset_related_item" ( - "id" CHAR(36) NOT NULL, - "type" VARCHAR NOT NULL, - "relation_type" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_related_item_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for 
table public.dataset_related_item: -1 rows -/*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; -INSERT INTO "dataset_related_item" ("id", "type", "relation_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'main', 'main', '00000000-0000-0000-0000-000000000002'), - ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', NULL); -/*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; - --- Dumping structure for table public.dataset_related_item_contributor -CREATE TABLE IF NOT EXISTS "dataset_related_item_contributor" ( - "id" CHAR(36) NOT NULL, - "name" VARCHAR NOT NULL, - "name_type" VARCHAR NOT NULL, - "creator" BOOLEAN NOT NULL, - "contributor_type" VARCHAR NOT NULL, - "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_related_item_contributor_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_related_item_contributor: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; -INSERT INTO "dataset_related_item_contributor" ("id", "name", "name_type", "creator", "contributor_type", "dataset_related_item_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'string', 'true', 'owner', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; - --- Dumping structure for table public.dataset_related_item_identifier -CREATE TABLE IF NOT EXISTS "dataset_related_item_identifier" ( - "id" CHAR(36) NOT NULL, - "identifier" VARCHAR NOT NULL, - "type" VARCHAR NOT NULL, - "metadata_scheme" VARCHAR NOT NULL, - "scheme_uri" VARCHAR NOT NULL, - "scheme_type" VARCHAR NOT NULL, - "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_related_item_identifier_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") 
REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_related_item_identifier: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_identifier" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_related_item_identifier" ENABLE KEYS */; - --- Dumping structure for table public.dataset_related_item_other -CREATE TABLE IF NOT EXISTS "dataset_related_item_other" ( - "id" CHAR(36) NOT NULL, - "publication_year" VARCHAR NOT NULL, - "volume" VARCHAR NOT NULL, - "issue" VARCHAR NOT NULL, - "number_value" VARCHAR NOT NULL, - "number_type" VARCHAR NOT NULL, - "first_page" VARCHAR NOT NULL, - "last_page" BOOLEAN NOT NULL, - "publisher" VARCHAR NOT NULL, - "edition" VARCHAR NOT NULL, - "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_related_item_other_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_related_item_other: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_other" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_related_item_other" ENABLE KEYS */; - --- Dumping structure for table public.dataset_related_item_title -CREATE TABLE IF NOT EXISTS "dataset_related_item_title" ( - "id" CHAR(36) NOT NULL, - "type" VARCHAR NOT NULL, - "title" VARCHAR NOT NULL, - "dataset_related_item_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_related_item_title_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_related_item_title: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_title" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_related_item_title" ENABLE KEYS */; - --- Dumping structure for table public.dataset_rights -CREATE TABLE IF NOT 
EXISTS "dataset_rights" ( - "id" CHAR(36) NOT NULL, - "rights" VARCHAR NOT NULL, - "uri" VARCHAR NOT NULL, - "identifier" VARCHAR NOT NULL, - "identifier_scheme" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_rights_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_rights: -1 rows -/*!40000 ALTER TABLE "dataset_rights" DISABLE KEYS */; -INSERT INTO "dataset_rights" ("id", "rights", "uri", "identifier", "identifier_scheme", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', NULL), - ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', NULL); -/*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; - --- Dumping structure for table public.dataset_subject -CREATE TABLE IF NOT EXISTS "dataset_subject" ( - "id" CHAR(36) NOT NULL, - "subject" VARCHAR NOT NULL, - "scheme" VARCHAR NOT NULL, - "scheme_uri" VARCHAR NOT NULL, - "value_uri" VARCHAR NOT NULL, - "classification_code" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_subject_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_subject: -1 rows -/*!40000 ALTER TABLE "dataset_subject" DISABLE KEYS */; -INSERT INTO "dataset_subject" ("id", "subject", "scheme", "scheme_uri", "value_uri", "classification_code", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', NULL), - ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', NULL); -/*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; - --- Dumping structure for table public.dataset_title -CREATE TABLE IF NOT EXISTS "dataset_title" ( - "id" CHAR(36) NOT NULL, - "title" VARCHAR 
NOT NULL, - "type" VARCHAR NOT NULL, - "dataset_id" VARCHAR NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_title_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_title: -1 rows -/*!40000 ALTER TABLE "dataset_title" DISABLE KEYS */; -INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES - ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', NULL); -/*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; - --- Dumping structure for table public.invited_study_contributor -CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( - "email_address" VARCHAR NOT NULL, - "permission" VARCHAR NOT NULL, - "invited_on" TIMESTAMP NOT NULL, - "study_id" CHAR(36) NOT NULL, - PRIMARY KEY ("email_address", "study_id"), - CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.invited_study_contributor: -1 rows -/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; -INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES - ('aydan.gasimova@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('bhavesh.patel@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000003'), - ('sanjay.soundarajan@@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000004'); -/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; - --- Dumping structure for table public.participant -CREATE TABLE IF NOT EXISTS "participant" ( - "id" CHAR(36) NOT NULL, - "first_name" VARCHAR NOT NULL, - "last_name" VARCHAR NOT NULL, - "address" VARCHAR NOT NULL, - "age" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "updated_on" TIMESTAMP NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY 
KEY ("id"), - CONSTRAINT "participant_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.participant: -1 rows -/*!40000 ALTER TABLE "participant" DISABLE KEYS */; -INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000004'), - ('921ba857-dd08-4149-8f5c-245c6c93ef84', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:23.627034', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('458d2c15-6ed8-4f70-a47d-70b42f2f1b86', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:36.656094', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('35750167-40c5-4f4a-9d8e-ebe89c2efcfc', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:52.555088', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:59.614647', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('b444520d-0eac-4065-a86d-004481f68d8a', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:45:49.495595', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('88c7592a-4382-4d6b-a197-e880e49db3c0', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:46:17.682171', '2023-08-29 
13:46:17.682171', '00000000-0000-0000-0000-000000000001'), - ('ba73ed99-6ec2-46e0-acdb-4a00c31dd572', 'aydan', 'gasimova', '1221d kibler drive', '20', '2023-08-29 15:08:03.758771', '2023-08-29 15:08:03.758771', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-13 16:33:53', '2023-08-29 15:09:04.323914', '00000000-0000-0000-0000-000000000001'), - ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-29 15:15:35.891076', '2023-08-29 15:15:35.891076', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "participant" ENABLE KEYS */; - --- Dumping structure for table public.study -CREATE TABLE IF NOT EXISTS "study" ( - "id" CHAR(36) NOT NULL, - "title" VARCHAR NOT NULL, - "image" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "updated_on" TIMESTAMP NOT NULL, - PRIMARY KEY ("id") -); - --- Dumping data for table public.study: -1 rows -/*!40000 ALTER TABLE "study" DISABLE KEYS */; -INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), - ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://loremflickr.com/640/480?lock=342651989655552', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), - ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), - ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://loremflickr.com/640/480?lock=342651989655552', '2023-08-03 12:33:10', '2023-01-03 12:33:11'), - ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://loremflickr.com/640/480?lock=342651989655552', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), - ('00000000-0000-0000-0000-000000000002', 'study 2', 
'https://loremflickr.com/640/480?lock=342651989655552', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), - ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://loremflickr.com/640/480?lock=342651989655552', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), - ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://loremflickr.com/640/480?lock=342651989655552', '2021-08-03 12:33:10', '2023-05-03 12:33:11'); -/*!40000 ALTER TABLE "study" ENABLE KEYS */; - --- Dumping structure for table public.study_arm -CREATE TABLE IF NOT EXISTS "study_arm" ( - "id" CHAR(36) NOT NULL, - "label" VARCHAR NOT NULL, - "type" VARCHAR NOT NULL, - "description" VARCHAR NOT NULL, - "intervention_list" UNKNOWN NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_arm_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_arm: -1 rows -/*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; -INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Active Comparator', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000003'), - ('75edc7d3-ab7c-404d-a6dd-b55f7fe6446d', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('2b26a772-b4af-4e61-9e76-6642746b78ee', '', '', '', '{""}', '00000000-0000-0000-0000-000000000001'), - ('a82a5e49-a735-4ba3-ab2e-ba64e7fb464c', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'label', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000002'), - ('ba03826c-b9db-4517-aeaa-031793de4a25', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - 
('a11728f0-fadb-4bd0-be09-511d5fb39649', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'), - ('311fed5e-fd7a-4a02-8465-3b55a05cab04', 'label1', 'type', 'description', '{list}', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; - --- Dumping structure for table public.study_available_ipd -CREATE TABLE IF NOT EXISTS "study_available_ipd" ( - "id" CHAR(36) NOT NULL, - "identifier" VARCHAR NOT NULL, - "type" VARCHAR NOT NULL, - "url" VARCHAR NOT NULL, - "comment" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_available_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_available_ipd: -1 rows -/*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; -INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', ' for intermediate-size patient populations', 'available', 'https://json-schema.org/draft/2020-12/schema', 'none', '00000000-0000-0000-0000-000000000003'); -/*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; - --- Dumping structure for table public.study_contact -CREATE TABLE IF NOT EXISTS "study_contact" ( - "id" CHAR(36) NOT NULL, - "first_name" VARCHAR NOT 
NULL, - "last_name" VARCHAR NOT NULL, - "affiliation" VARCHAR NOT NULL, - "role" VARCHAR NOT NULL, - "phone" VARCHAR NOT NULL, - "phone_ext" VARCHAR NOT NULL, - "email_address" VARCHAR NOT NULL, - "central_contact" BOOLEAN NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_contact_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_contact: -1 rows -/*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; -INSERT INTO "study_contact" ("id", "first_name", "last_name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'holly', 'sienna', 'calmi2', 'editor', '4056074345', 'ext', 'holly.sienna@gmail.com', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'), - ('81e71d41-2c93-47cb-9fac-00d94ab1c1a2', 'billy', 'brown', 'calmi2', 'editor', '4056074345', 'ext', 'billy.sanders@gmail.com', 'true', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; - --- Dumping structure for table public.study_contributor -CREATE TABLE IF NOT EXISTS "study_contributor" ( - "permission" VARCHAR NOT NULL, - "user_id" CHAR(36) NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("user_id"), - CONSTRAINT "study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, - CONSTRAINT "study_contributor_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_contributor: -1 rows -/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; -INSERT INTO "study_contributor" ("permission", "user_id", 
"study_id") VALUES - ('editor', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), - ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), - ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000004'), - ('editor', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000006'); -/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; - --- Dumping structure for table public.study_description -CREATE TABLE IF NOT EXISTS "study_description" ( - "id" CHAR(36) NOT NULL, - "brief_summary" VARCHAR NOT NULL, - "detailed_description" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_description_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_description: -1 rows -/*!40000 ALTER TABLE "study_description" DISABLE KEYS */; -INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000003'), - ('f51a772e-373a-452a-8106-822840a76339', 'study summary', 'This is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_description" 
ENABLE KEYS */; - --- Dumping structure for table public.study_design -CREATE TABLE IF NOT EXISTS "study_design" ( - "id" CHAR(36) NOT NULL, - "design_allocation" VARCHAR NOT NULL, - "study_type" VARCHAR NOT NULL, - "design_interventional_model" VARCHAR NOT NULL, - "design_intervention_model_description" VARCHAR NOT NULL, - "design_primary_purpose" VARCHAR NOT NULL, - "design_masking" VARCHAR NOT NULL, - "design_masking_description" VARCHAR NOT NULL, - "design_who_masked_list" UNKNOWN NOT NULL, - "phase_list" UNKNOWN NOT NULL, - "enrollment_count" INTEGER NOT NULL, - "enrollment_type" VARCHAR NOT NULL, - "number_arms" INTEGER NOT NULL, - "design_observational_model_list" UNKNOWN NOT NULL, - "design_time_perspective_list" UNKNOWN NOT NULL, - "bio_spec_retention" VARCHAR NOT NULL, - "bio_spec_description" VARCHAR NOT NULL, - "target_duration" VARCHAR NOT NULL, - "number_groups_cohorts" INTEGER NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_design_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_design: -1 rows -/*!40000 ALTER TABLE "study_design" DISABLE KEYS */; -INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_interventional_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', 
'3years', 10, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Randomized', 'type', 'treatment of cancer', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '1 years', 10, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'Randomized', 'type', 'treatment', 'description', 'Single Group Assignment', 'Blinded', 'description', '{Participant}', '{Trials}', 1, 'enrollmentInfo', 2, '{casecontrol}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3 months', 10, '00000000-0000-0000-0000-000000000002'), - ('2b1312ef-338b-454a-9e17-5db84e17d97c', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{[,'',P,a,r,t,i,c,i,p,a,n,t,'',]}', '{Trials}', 1, 'enrollmentInfo', 2, '{[,'',C,a,s,e,C,o,n,t,r,o,l,'',]}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'), - ('ca5500a4-cbce-454a-a767-653461d59397', 'Randomized', 'type', 'biomedical chemistry', 'description', 'Single Group Assignment', 'Blinded', 'description', '{CaseControl}', '{Trials}', 1, 'enrollmentInfo', 2, '{CaseControl}', '{Retrospective}', 'Samples With DNA', 'Specify all types', '3years', 10, '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_design" ENABLE KEYS */; - --- Dumping structure for table public.study_eligibility -CREATE TABLE IF NOT EXISTS "study_eligibility" ( - "id" CHAR(36) NOT NULL, - "gender" VARCHAR NOT NULL, - "gender_based" VARCHAR NOT NULL, - "gender_description" VARCHAR NOT NULL, - "healthy_volunteers" BOOLEAN NOT NULL, - "inclusion_criteria" UNKNOWN NOT NULL, - "exclusion_criteria" UNKNOWN NOT NULL, - "study_population" VARCHAR NOT NULL, - "sampling_method" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - 
"minimum_age_value" INTEGER NOT NULL, - "minimum_age_unit" VARCHAR NOT NULL, - "maximum_age_value" INTEGER NOT NULL, - "maximum_age_unit" VARCHAR NOT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_eligibility_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_eligibility: 6 rows -/*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; -INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id", "minimum_age_value", "minimum_age_unit", "maximum_age_value", "maximum_age_unit") VALUES - ('00000000-0000-0000-0000-000000000004', 'female', 'Correct', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 30, 'UCSD', 54, 'UW'), - ('dfac0d9e-a104-4f4b-ac1d-05f3699c72f3', 'female', 'Not given', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 23, 'UCSD', 32, 'UW'), - ('00000000-0000-0000-0000-000000000002', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 45, 'UCLA', 43, 'UCLA'), - ('00000000-0000-0000-0000-000000000001', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 24, 'UCSD', 34, 'UCLA'), - ('00000000-0000-0000-0000-000000000003', 'female', 'True', 'none', 'false', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 56, 'UCLA', 37, 'UCSD'), - ('01ac64ef-cfca-47bc-8f30-67525017461f', 'female', 'True', 'none', 'true', '{concluded}', '{none}', 'primary care clinic', 'Probability Sample', '00000000-0000-0000-0000-000000000001', 34, 'UW', 
29, 'UW'); -/*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; - --- Dumping structure for table public.study_identification -CREATE TABLE IF NOT EXISTS "study_identification" ( - "id" CHAR(36) NOT NULL, - "identifier" VARCHAR NOT NULL, - "identifier_type" VARCHAR NOT NULL, - "identifier_domain" VARCHAR NOT NULL, - "identifier_link" VARCHAR NOT NULL, - "secondary" BOOLEAN NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_identification_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_identification: -1 rows -/*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; -INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), - ('cfc1b66c-882a-4eee-a6d7-01a7cb018ac2', 'Screening', 'Registry Identifier', 'registry Identifier', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; - --- Dumping structure for table 
public.study_intervention -CREATE TABLE IF NOT EXISTS "study_intervention" ( - "id" CHAR(36) NOT NULL, - "type" VARCHAR NOT NULL, - "name" VARCHAR NOT NULL, - "description" VARCHAR NOT NULL, - "arm_group_label_list" UNKNOWN NOT NULL, - "other_name_list" UNKNOWN NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_intervention_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_intervention: -1 rows -/*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; -INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'Drug', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'Procedure/Surgery', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'Radiation', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000004'), - ('70eecc49-2c32-47a4-a176-2abb57334fab', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('ede01416-9693-4095-bdae-a2c144a9ec82', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('f6c68d25-8a1c-47ec-9b8d-4db36cf3fecd', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'Device', 'intervention name updatee', 'Other 
current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'), - ('65ef7ce9-4992-47a1-8a86-355792ca6fbc', 'Device', 'intervention name', 'Other current and former name', '{"Arm Group Label"}', '{"Arm other list"}', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; - --- Dumping structure for table public.study_ipdsharing -CREATE TABLE IF NOT EXISTS "study_ipdsharing" ( - "id" CHAR(36) NOT NULL, - "ipd_sharing" VARCHAR NOT NULL, - "ipd_sharing_description" VARCHAR NOT NULL, - "ipd_sharing_info_type_list" UNKNOWN NOT NULL, - "ipd_sharing_time_frame" VARCHAR NOT NULL, - "ipd_sharing_access_criteria" VARCHAR NOT NULL, - "ipd_sharing_url" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_ipdsharing_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_ipdsharing: -1 rows -/*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; -INSERT INTO "study_ipdsharing" ("id", "ipd_sharing", "ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan (SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000003'), - ('ebfe1211-763e-4b10-8e15-7ccb29cb21f5', 'IPDSharing', 'unplanned', '{"Statistical Analysis Plan 
(SAP)"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; - --- Dumping structure for table public.study_link -CREATE TABLE IF NOT EXISTS "study_link" ( - "id" CHAR(36) NOT NULL, - "url" VARCHAR NOT NULL, - "title" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_link_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_link: -1 rows -/*!40000 ALTER TABLE "study_link" DISABLE KEYS */; -INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000003'), - ('e354922c-9ab3-4b38-ba79-c4d4640737d2', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'), - ('040d305e-504d-433b-b5c2-7d56c24d440a', 'https://schema.aireadi.org/', 'schema', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_link" ENABLE KEYS */; - --- Dumping structure for table public.study_location -CREATE TABLE IF NOT EXISTS "study_location" ( - "id" CHAR(36) NOT NULL, - "facility" VARCHAR NOT NULL, - "status" VARCHAR NOT NULL, - "city" VARCHAR NOT NULL, - "state" VARCHAR NOT NULL, - "zip" VARCHAR NOT NULL, - "country" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_location_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE 
NO ACTION -); - --- Dumping data for table public.study_location: -1 rows -/*!40000 ALTER TABLE "study_location" DISABLE KEYS */; -INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000005', 'facility', 'active', 'San diego', 'CA', '92121', 'sAN dIEGO', '00000000-0000-0000-0000-000000000004'), - ('cda2dc03-95cf-494a-87ea-aac49ac07f0b', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'), - ('72d6a140-e57b-4ba4-a57d-391cdc871c21', 'facility', 'active', 'San diego', 'CA', '92121', 'San diego', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_location" ENABLE KEYS */; - --- Dumping structure for table public.study_other -CREATE TABLE IF NOT EXISTS "study_other" ( - "id" CHAR(36) NOT NULL, - "oversight_has_dmc" BOOLEAN NOT NULL, - "conditions" UNKNOWN NOT NULL, - "keywords" UNKNOWN NOT NULL, - "size" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_other_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_other: -1 rows -/*!40000 ALTER TABLE "study_other" DISABLE KEYS */; -INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES - 
('00000000-0000-0000-0000-000000000002', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000003', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000001', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'), - ('cd440fa9-988b-4d51-8b66-8c2e42c630b3', 'false', '{conditional}', '{none}', '1', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_other" ENABLE KEYS */; - --- Dumping structure for table public.study_overall_official -CREATE TABLE IF NOT EXISTS "study_overall_official" ( - "id" CHAR(36) NOT NULL, - "first_name" VARCHAR NOT NULL, - "last_name" VARCHAR NOT NULL, - "affiliation" VARCHAR NOT NULL, - "role" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_overall_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_overall_official: -1 rows -/*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; -INSERT INTO "study_overall_official" ("id", "first_name", "last_name", "affiliation", "role", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'firstname', 'lastname', 'affiliation', 'Study Chair, Study Director', '00000000-0000-0000-0000-000000000003'), - ('a0806089-6602-48b0-b870-1d5e91b956a5', 'firstname', 'lastname', 'affiliation', 'Study Chair', '00000000-0000-0000-0000-000000000001'); 
-/*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; - --- Dumping structure for table public.study_reference -CREATE TABLE IF NOT EXISTS "study_reference" ( - "id" CHAR(36) NOT NULL, - "identifier" VARCHAR NOT NULL, - "type" VARCHAR NOT NULL, - "citation" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_reference_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_reference: 6 rows -/*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; -INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'The PubMed Unique Identifier ', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'The PubMed Unique Identifier ', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), - ('2996e115-8c44-4914-a470-2764ff280316', 'The PubMed Unique Identifier ', 'false', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'The PubMed Unique Identifier ', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'The PubMed Unique Identifier ', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000005', 'The PubMed Unique Identifier ', 'type', 'A bibliographic reference', '00000000-0000-0000-0000-000000000004'); -/*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; - --- Dumping structure for table public.study_sponsors_collaborators -CREATE TABLE IF NOT EXISTS "study_sponsors_collaborators" ( - "id" CHAR(36) NOT NULL, - "responsible_party_type" VARCHAR NOT NULL, - "responsible_party_investigator_name" VARCHAR NOT NULL, - "responsible_party_investigator_title" VARCHAR NOT NULL, - 
"responsible_party_investigator_affiliation" VARCHAR NOT NULL, - "lead_sponsor_name" VARCHAR NOT NULL, - "collaborator_name" UNKNOWN NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_sponsors_collaborators_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_sponsors_collaborators: -1 rows -/*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; -INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name", "collaborator_name", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000005', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'), - ('687dea6a-4dbf-45dc-867e-de7b303d4b0c', 'San Diego', 'firstname', 'title', 'affiliation', 'name', '{"clinical study"}', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; - --- Dumping structure for table public.study_status -CREATE TABLE IF NOT EXISTS "study_status" ( - "id" CHAR(36) NOT NULL, - "overall_status" VARCHAR NOT NULL, - "why_stopped" 
VARCHAR NOT NULL, - "start_date" TIMESTAMP NOT NULL, - "start_date_type" VARCHAR NOT NULL, - "completion_date" TIMESTAMP NOT NULL, - "completion_date_type" VARCHAR NOT NULL, - "study_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_status: -1 rows -/*!40000 ALTER TABLE "study_status" DISABLE KEYS */; -INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2021-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Terminated', '2020-08-21 12:57:34', 'anticipated', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000001'), - ('8100ce8e-406d-4483-bc47-634e97c34713', 'Overall Recruitment Status for the study must be ''Recruiting''', 'Suspended', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'anticipated', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_status" ENABLE KEYS */; - --- Dumping structure for table public.user -CREATE TABLE IF NOT EXISTS "user" ( - "id" CHAR(36) NOT NULL, - 
"email_address" VARCHAR NOT NULL, - "username" VARCHAR NOT NULL, - "first_name" VARCHAR NOT NULL, - "last_name" VARCHAR NOT NULL, - "orcid" VARCHAR NOT NULL, - "hash" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "institution" VARCHAR NOT NULL, - PRIMARY KEY ("id") -); - --- Dumping data for table public.user: -1 rows -/*!40000 ALTER TABLE "user" DISABLE KEYS */; -INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES - ('00000000-0000-0000-0000-000000000001', 'bhavesh.patel@gmail.com', 'bhavesh', 'Bhavesh', 'Patel', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), - ('00000000-0000-0000-0000-000000000002', 'sanjay.soundarajan@gmail.com', 'sanjay', 'sanjay', 'soundarajan', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), - ('00000000-0000-0000-0000-000000000003', 'billy.sanders@gmail.com', 'billy', 'billy', 'sanders', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'), - ('00000000-0000-0000-0000-000000000004', 'james.lilly@gmail.com', 'james', 'james', 'lilly', '1111-2222-333-444-5555', 'hashed', '2023-08-13 12:34:06', 'CALMI2'); -/*!40000 ALTER TABLE "user" ENABLE KEYS */; - --- Dumping structure for table public.version -CREATE TABLE IF NOT EXISTS "version" ( - "id" CHAR(36) NOT NULL, - "title" VARCHAR NOT NULL, - "published" BOOLEAN NOT NULL, - "changelog" VARCHAR NOT NULL, - "updated_on" TIMESTAMP NOT NULL, - "doi" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "published_on" TIMESTAMP NOT NULL, - "dataset_id" CHAR(36) NULL DEFAULT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_version_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.version: -1 rows -/*!40000 ALTER TABLE "version" DISABLE KEYS */; -INSERT INTO "version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", 
"published_on", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'AIREADI1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'AIREADI4', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000004'), - ('00000000-0000-0000-0000-000000000003', 'AIREADI3', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', NULL); -/*!40000 ALTER TABLE "version" ENABLE KEYS */; - --- Dumping structure for table public.version_participants -CREATE TABLE IF NOT EXISTS "version_participants" ( - "dataset_version_id" CHAR(36) NOT NULL, - "participant_id" CHAR(36) NOT NULL, - PRIMARY KEY ("dataset_version_id", "participant_id"), - CONSTRAINT "version_participants_dataset_version_id_fkey" FOREIGN KEY ("dataset_version_id") REFERENCES "version" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, - CONSTRAINT "version_participants_participant_id_fkey" FOREIGN KEY ("participant_id") REFERENCES "participant" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.version_participants: -1 rows -/*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; -INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES - ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); -/*!40000 
ALTER TABLE "version_participants" ENABLE KEYS */; - -/*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; -/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; -/*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; -/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; -/*!40111 SET SQL_NOTES=IFNULL(@OLD_SQL_NOTES, 1) */; From b412a89d08a39eafb074e4320872ec7831aa51da Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 6 Sep 2023 16:53:20 -0700 Subject: [PATCH 106/505] fix: update SQL queries --- sql/init_timezones.sql | 1042 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1042 insertions(+) create mode 100644 sql/init_timezones.sql diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql new file mode 100644 index 00000000..67653f89 --- /dev/null +++ b/sql/init_timezones.sql @@ -0,0 +1,1042 @@ +BEGIN; +-- -------------------------------------------------------- +-- Host: 127.0.0.1 +-- Server version: PostgreSQL 15.4 (Debian 15.4-1.pgdg120+1) on x86_64-pc-linux-gnu, compiled by gcc (Debian 12.2.0-14) 12.2.0, 64-bit +-- Server OS: +-- HeidiSQL Version: 12.3.0.6589 +-- -------------------------------------------------------- + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET NAMES */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- Dumping structure for table public.study +CREATE TABLE IF NOT EXISTS "study" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "image" VARCHAR NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("id") +); + +-- Dumping data for table public.study: 11 rows +/*!40000 ALTER TABLE "study" DISABLE KEYS */; +INSERT INTO "study" ("id", "title", "image", "created_at", 
"updated_on") VALUES + ('ec0064ca-4f34-48a8-9dcc-1377c7ca0a59', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1693953573, 1693953573), + ('995d703e-a6d0-4dc2-95e7-3ce868eb9fb7', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1693953609, 1693953609), + ('f154f7c2-58a9-4b2e-808c-3d9a71dc99d2', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1693953736, 1693953736), + ('22910241-051c-4f42-9a58-890704df32ad', 'Small Cotton Shoes', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', 1693805470, 1693805470), + ('af2f9f5e-24eb-4b54-8fe1-ec76391b9af6', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1694017629, 1694017629), + ('cb24e1c9-b4b2-451a-89b7-e73556d50ca2', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1694023951, 1694023951), + ('e5a2a1d2-850f-465a-8fc1-6a1aec6d9e5a', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1693953512, 1694028729), + ('39a913b7-daad-4c86-ba07-9f6400c73a28', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1694031647, 1694031647), + ('61ac1bdb-1809-4d20-8e29-22d3f2d85252', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1694032751, 1694032751), + ('626f2a7e-fa8f-459e-9076-a3b2082a00d2', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1694032781, 1694032781), + ('ee3012e5-3c51-4d21-b16c-5ace1c80cf72', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1694035574, 1694035574), + ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', 1693805470, 
1693805470), + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000002', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1693805470, 1693805470), + ('d925991e-af73-4fa2-ab2a-7040140a57df', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1693805470, 1693805470), + ('b32ca9d9-9656-49b5-9707-bd157dff0ffb', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1693805470, 1693805470), + ('95711a87-16ee-4ebd-bc37-1b79b104bff3', 'Recycled Cotton Shirt', 'https://www.svgrepo.com/show/213127/image-warning.svg', 1693805470, 1693805470); +/*!40000 ALTER TABLE "study" ENABLE KEYS */; + + +-- Dumping structure for table public.dataset +CREATE TABLE IF NOT EXISTS "dataset" ( + "id" CHAR(36) NOT NULL, + "study_id" CHAR(36) NOT NULL, + "updated_on" BIGINT NOT NULL, + "created_at" BIGINT NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset: -1 rows +/*!40000 ALTER TABLE "dataset" DISABLE KEYS */; +INSERT INTO "dataset" ("id", "study_id", "updated_on", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000002', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000002', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000006', 
'00000000-0000-0000-0000-000000000003', 1693957896, 1693957896), + ('c8b5eb7a-f939-44a3-86c1-7746e73329c4', '00000000-0000-0000-0000-000000000001', 1694031716, 1694031716); +/*!40000 ALTER TABLE "dataset" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_access +CREATE TABLE IF NOT EXISTS "dataset_access" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "url" VARCHAR NOT NULL, + "url_last_checked" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_access_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_access: -1 rows +/*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; +INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_alternate_identifier +CREATE TABLE IF NOT EXISTS "dataset_alternate_identifier" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_alternate_identifier_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_alternate_identifier: -1 rows +/*!40000 ALTER TABLE "dataset_alternate_identifier" DISABLE KEYS */; +INSERT INTO "dataset_alternate_identifier" ("id", "identifier", "identifier_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'), + 
('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', '00000000-0000-0000-0000-000000000001'), + ('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_alternate_identifier" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_consent +CREATE TABLE IF NOT EXISTS "dataset_consent" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "noncommercial" BOOLEAN NOT NULL, + "geog_restrict" BOOLEAN NOT NULL, + "research_type" BOOLEAN NOT NULL, + "genetic_only" BOOLEAN NOT NULL, + "no_methods" BOOLEAN NOT NULL, + "details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_consent_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_consent: -1 rows +/*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */; +INSERT INTO "dataset_consent" ("id", "type", "noncommercial", "geog_restrict", "research_type", "genetic_only", "no_methods", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'), + ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_contributor +CREATE TABLE IF NOT EXISTS "dataset_contributor" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "name_type" VARCHAR NOT NULL, + "name_identifier" VARCHAR NOT NULL, + "name_identifier_scheme" VARCHAR NOT NULL, + "name_identifier_scheme_uri" VARCHAR NOT NULL, + "creator" BOOLEAN NOT NULL, + "contributor_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_contributor_dataset_id_fkey" 
FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_contributor: -1 rows +/*!40000 ALTER TABLE "dataset_contributor" DISABLE KEYS */; +INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_contributor" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_contributor_affiliation +CREATE TABLE IF NOT EXISTS "dataset_contributor_affiliation" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_scheme" VARCHAR NOT NULL, + "identifier_scheme_uri" VARCHAR NOT NULL, + "dataset_contributor_id" VARCHAR NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_contributor_affiliation_dataset_contributor_id_fkey" FOREIGN KEY ("dataset_contributor_id") REFERENCES "dataset_contributor" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_contributor_affiliation: -1 rows +/*!40000 ALTER TABLE "dataset_contributor_affiliation" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_contributor_affiliation" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_date +CREATE TABLE IF NOT EXISTS "dataset_date" ( + "id" CHAR(36) NOT NULL, + "date" VARCHAR NOT NULL, + "date_type" VARCHAR NOT NULL, + "data_information" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_date_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_date: -1 rows +/*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; +INSERT INTO "dataset_date" 
("id", "date", "date_type", "data_information", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000005'), + ('0b1775e5-d110-482f-a1c4-2aa3947b8db8', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'), + ('dc090dbd-6fa3-4b61-829e-2f139bdbd116', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_date" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_description +CREATE TABLE IF NOT EXISTS "dataset_description" ( + "id" CHAR(36) NOT NULL, + "description" VARCHAR NOT NULL, + "description_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_description_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_description: -1 rows +/*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; +INSERT INTO "dataset_description" ("id", "description", "description_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', 
'00000000-0000-0000-0000-000000000004'), + ('78f2b774-2f5a-4096-b82e-9923ca04395b', '', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', '', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_de_ident_level +CREATE TABLE IF NOT EXISTS "dataset_de_ident_level" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "direct" BOOLEAN NOT NULL, + "hipaa" BOOLEAN NOT NULL, + "dates" BOOLEAN NOT NULL, + "nonarr" BOOLEAN NOT NULL, + "k_anon" BOOLEAN NOT NULL, + "details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_de_ident_level_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_de_ident_level: -1 rows +/*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; +INSERT INTO "dataset_de_ident_level" ("id", "type", "direct", "hipaa", "dates", "nonarr", "k_anon", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'NA', 'false', 'true', 'false', 'true', 'false', 'none', '00000000-0000-0000-0000-000000000002'), + ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'), + ('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_funder +CREATE TABLE IF NOT EXISTS "dataset_funder" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "identifier_scheme_uri" VARCHAR NOT NULL, + "award_number" 
VARCHAR NOT NULL, + "award_uri" VARCHAR NOT NULL, + "award_title" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_funder_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_funder: -1 rows +/*!40000 ALTER TABLE "dataset_funder" DISABLE KEYS */; +INSERT INTO "dataset_funder" ("id", "name", "identifier", "identifier_type", "identifier_scheme_uri", "award_number", "award_uri", "award_title", "dataset_id") VALUES + ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_managing_organization +CREATE TABLE IF NOT EXISTS "dataset_managing_organization" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "ror_id" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_managing_organization_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_managing_organization: -1 rows +/*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; +INSERT INTO "dataset_managing_organization" ("id", "name", "ror_id", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', '00000000-0000-0000-0000-000000000001'), + ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', '354grhji5', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_other +CREATE TABLE IF NOT EXISTS "dataset_other" ( + "id" CHAR(36) NOT NULL, + "language" VARCHAR NOT NULL, + "managing_organization_name" VARCHAR NOT NULL, + "managing_organization_ror_id" VARCHAR NOT NULL, + "size" VARCHAR[] NOT NULL, + 
"standards_followed" VARCHAR NOT NULL, + "acknowledgement" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_other_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_other: -1 rows +/*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; +INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "managing_organization_ror_id", "size", "standards_followed", "acknowledgement", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000002', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000002'), + ('2fca4640-6f0e-406c-8c7a-e93a0740b9c6', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org', 'NA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_readme +CREATE TABLE IF NOT EXISTS "dataset_readme" ( + "id" CHAR(36) NOT NULL, + "content" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_readme: -1 rows +/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_readme" ENABLE 
KEYS */; + +-- Dumping structure for table public.dataset_record_keys +CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( + "id" CHAR(36) NOT NULL, + "key_type" VARCHAR NOT NULL, + "key_details" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_record_keys_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_record_keys: -1 rows +/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; +INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES + ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), + ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_related_item +CREATE TABLE IF NOT EXISTS "dataset_related_item" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "relation_type" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_related_item: -1 rows +/*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; +INSERT INTO "dataset_related_item" ("id", "type", "relation_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'main', 'main', '00000000-0000-0000-0000-000000000002'), + ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; + +-- Dumping 
structure for table public.dataset_related_item_contributor +CREATE TABLE IF NOT EXISTS "dataset_related_item_contributor" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "name_type" VARCHAR NOT NULL, + "creator" BOOLEAN NOT NULL, + "contributor_type" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_contributor_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_related_item_contributor: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; +INSERT INTO "dataset_related_item_contributor" ("id", "name", "name_type", "creator", "contributor_type", "dataset_related_item_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'string', 'true', 'owner', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_related_item_identifier +CREATE TABLE IF NOT EXISTS "dataset_related_item_identifier" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "metadata_scheme" VARCHAR NOT NULL, + "scheme_uri" VARCHAR NOT NULL, + "scheme_type" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_identifier_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_related_item_identifier: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_identifier" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_identifier" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_related_item_other +CREATE TABLE IF NOT EXISTS "dataset_related_item_other" ( + "id" 
CHAR(36) NOT NULL, + "publication_year" VARCHAR NOT NULL, + "volume" VARCHAR NOT NULL, + "issue" VARCHAR NOT NULL, + "number_value" VARCHAR NOT NULL, + "number_type" VARCHAR NOT NULL, + "first_page" VARCHAR NOT NULL, + "last_page" BOOLEAN NOT NULL, + "publisher" VARCHAR NOT NULL, + "edition" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_other_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_related_item_other: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_other" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_other" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_related_item_title +CREATE TABLE IF NOT EXISTS "dataset_related_item_title" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "title" VARCHAR NOT NULL, + "dataset_related_item_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_related_item_title_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_related_item_title: -1 rows +/*!40000 ALTER TABLE "dataset_related_item_title" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_title" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_rights +CREATE TABLE IF NOT EXISTS "dataset_rights" ( + "id" CHAR(36) NOT NULL, + "rights" VARCHAR NOT NULL, + "uri" VARCHAR NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_scheme" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_rights_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_rights: -1 rows +/*!40000 ALTER 
TABLE "dataset_rights" DISABLE KEYS */; +INSERT INTO "dataset_rights" ("id", "rights", "uri", "identifier", "identifier_scheme", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'), + ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_subject +CREATE TABLE IF NOT EXISTS "dataset_subject" ( + "id" CHAR(36) NOT NULL, + "subject" VARCHAR NOT NULL, + "scheme" VARCHAR NOT NULL, + "scheme_uri" VARCHAR NOT NULL, + "value_uri" VARCHAR NOT NULL, + "classification_code" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_subject_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_subject: -1 rows +/*!40000 ALTER TABLE "dataset_subject" DISABLE KEYS */; +INSERT INTO "dataset_subject" ("id", "subject", "scheme", "scheme_uri", "value_uri", "classification_code", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'), + ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; + +-- Dumping structure for table public.dataset_title +CREATE TABLE IF NOT EXISTS "dataset_title" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "dataset_id" VARCHAR NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_title_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.dataset_title: -1 rows +/*!40000 ALTER TABLE "dataset_title" 
DISABLE KEYS */; +INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES + ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; + +-- Dumping structure for table public.invited_study_contributor +CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( + "email_address" VARCHAR NOT NULL, + "permission" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + "invited_on" BIGINT NOT NULL, + PRIMARY KEY ("email_address", "study_id"), + CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.invited_study_contributor: -1 rows +/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; +INSERT INTO "invited_study_contributor" ("email_address", "permission", "study_id", "invited_on") VALUES + ('Aliya_Herman@yahoo.com', 'editor', '00000000-0000-0000-0000-000000000001', 1693805470), + ('Anastacio50@hotmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), + ('Edward0@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), + ('Jailyn17@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000002', 1693805470); +/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; + +-- Dumping structure for table public.participant +CREATE TABLE IF NOT EXISTS "participant" ( + "id" CHAR(36) NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "address" VARCHAR NOT NULL, + "age" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "participant_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.participant: -1 rows +/*!40000 ALTER TABLE "participant" DISABLE KEYS */; +INSERT INTO 
"participant" ("id", "first_name", "last_name", "address", "age", "study_id", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', '00000000-0000-0000-0000-000000000002', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', '00000000-0000-0000-0000-000000000003', 1693805470, 1693805470), + ('921ba857-dd08-4149-8f5c-245c6c93ef84', 'aydan1', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('458d2c15-6ed8-4f70-a47d-70b42f2f1b86', 'aydan1', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('35750167-40c5-4f4a-9d8e-ebe89c2efcfc', 'aydan1', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('b444520d-0eac-4065-a86d-004481f68d8a', 'aydan1', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('88c7592a-4382-4d6b-a197-e880e49db3c0', 'aydan1', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('ba73ed99-6ec2-46e0-acdb-4a00c31dd572', 'aydan', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('c1f24707-e909-45e5-9b44-fd35c0ad62be', 'bhavesh', 'patel', '3904 university ave', '20', '00000000-0000-0000-0000-000000000001', 1694032113, 
1694032113); +/*!40000 ALTER TABLE "participant" ENABLE KEYS */; + + + +-- Dumping structure for table public.user +CREATE TABLE IF NOT EXISTS "user" ( + "id" CHAR(36) NOT NULL, + "email_address" VARCHAR NOT NULL, + "username" VARCHAR NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "orcid" VARCHAR NOT NULL, + "hash" VARCHAR NOT NULL, + "institution" VARCHAR NOT NULL, + "created_at" BIGINT NOT NULL, + PRIMARY KEY ("id") +); + +-- Dumping data for table public.user: -1 rows +/*!40000 ALTER TABLE "user" DISABLE KEYS */; +INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "institution", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', 'Schinner, Kuvalis and Beatty', 1693805470), + ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', 'Schmitt Inc', 1693805470), + ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', 'Stracke, Leuschke and Kuvalis', 1693805470), + ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', 'Heidenreich, Wilkinson and Mitchell', 1693805470), + ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', 'Heaney, Russel and Turner', 1693805470); +/*!40000 ALTER TABLE "user" ENABLE KEYS */; + + +-- Dumping structure for table public.study_arm +CREATE TABLE IF NOT EXISTS "study_arm" ( + "id" CHAR(36) NOT NULL, + "label" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "intervention_list" VARCHAR[] NOT NULL, + 
"study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_arm_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_arm: -1 rows +/*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; +INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), + ('3fa464ca-6701-4a75-ab84-c26f3d3f49be', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), + ('527b87cc-55e5-4e39-ada6-1ed738cdde47', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), + ('3d2189e8-e95b-4d1b-ac1e-b0716bbe9eb4', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), + ('47c1c51b-f145-4b7a-af99-f05eb0feb133', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), + ('50278410-a4ca-4e0b-bff0-632f9a1c447a', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), + ('cb555a08-5387-4d34-b397-1ddd10fec0b9', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), + ('038bb56d-2b8b-483a-a974-3612fc52b2a3', 'arm2', 'Experimental', 'Lorem Ipsum', '{inter1,"intervention 2"}', 
'00000000-0000-0000-0000-000000000001'), + ('173c6350-ba74-47fd-ae34-f39e2c4901ab', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), + ('91dca128-d30d-41e3-8115-2a548b029e04', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; + +-- Dumping structure for table public.study_available_ipd +CREATE TABLE IF NOT EXISTS "study_available_ipd" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "url" VARCHAR NOT NULL, + "comment" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_available_ipd_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_available_ipd: -1 rows +/*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; +INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'AS2655AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'AS625AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; + +-- Dumping structure for table public.study_contact +CREATE TABLE IF NOT EXISTS "study_contact" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "affiliation" VARCHAR NOT NULL, + "role" VARCHAR NULL DEFAULT NULL, + "phone" VARCHAR NOT NULL, + "phone_ext" VARCHAR NOT NULL, + "email_address" VARCHAR NOT NULL, + "central_contact" BOOLEAN NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT 
"study_contact_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_contact: -1 rows +/*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; +INSERT INTO "study_contact" ("id", "name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', 'true', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'Lela', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'Verner', 'Monahan and Sons', NULL, '501-039-841', '', 'Verner19@yahoo.com', 'false', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', 'false', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; + +-- Dumping structure for table public.study_contributor +CREATE TABLE IF NOT EXISTS "study_contributor" ( + "permission" VARCHAR NOT NULL, + "user_id" CHAR(36) NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("user_id", "study_id"), + CONSTRAINT "study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_contributor_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_contributor: -1 rows +/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; +INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000002', 
'00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'), + ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('viewer', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000002'), + ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000003'), + ('viewer', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000003'), + ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000003'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000005'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007'), + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000008'); +/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; + +-- Dumping structure for table public.study_description +CREATE TABLE IF NOT EXISTS "study_description" ( + "id" CHAR(36) NOT NULL, + "brief_summary" VARCHAR NOT NULL, + "detailed_description" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_description_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_description: -1 rows +/*!40000 ALTER TABLE "study_description" DISABLE KEYS */; +INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'study summary', 'big description', '00000000-0000-0000-0000-000000000001'), + 
('00000000-0000-0000-0000-000000000003', 'study summary', 'big description', '00000000-0000-0000-0000-000000000003'), + ('d083b544-b359-4210-9f5f-d5de7b3dce1e', 'study summary', 'big description', '00000000-0000-0000-0000-000000000002'), + ('bfc385dc-4391-41a5-9f93-65d96e60bfe4', '', '', 'e5a2a1d2-850f-465a-8fc1-6a1aec6d9e5a'), + ('052fe6d3-bb46-478c-b680-77f342594258', '', '', 'ec0064ca-4f34-48a8-9dcc-1377c7ca0a59'), + ('3ebd87eb-f790-41b9-9a73-9ba51a39f429', '', '', '995d703e-a6d0-4dc2-95e7-3ce868eb9fb7'), + ('58f8ee80-9d2b-4828-addf-03e072702423', '', '', 'f154f7c2-58a9-4b2e-808c-3d9a71dc99d2'), + ('e6256c6b-138a-438b-b6f3-916767dfb051', '', '', 'af2f9f5e-24eb-4b54-8fe1-ec76391b9af6'), + ('b66dae93-ca0e-4d8c-8078-d504b5ed6038', '', '', 'cb24e1c9-b4b2-451a-89b7-e73556d50ca2'), + ('3ca6a722-479c-4e79-b79a-702fe8f5ed06', '', '', '39a913b7-daad-4c86-ba07-9f6400c73a28'), + ('e8a2b389-781d-4e10-94a3-0a04b59771d7', '', '', '61ac1bdb-1809-4d20-8e29-22d3f2d85252'), + ('ab7a024f-c9f2-4021-b86c-1516025c4cc4', '', '', '626f2a7e-fa8f-459e-9076-a3b2082a00d2'), + ('a75786cc-06d8-4793-ae09-1a85a3cb4d26', '', '', 'ee3012e5-3c51-4d21-b16c-5ace1c80cf72'); +/*!40000 ALTER TABLE "study_description" ENABLE KEYS */; + +-- Dumping structure for table public.study_design +CREATE TABLE IF NOT EXISTS "study_design" ( + "id" CHAR(36) NOT NULL, + "design_allocation" VARCHAR NULL DEFAULT NULL, + "study_type" VARCHAR NOT NULL, + "design_intervention_model" VARCHAR NULL DEFAULT NULL, + "design_intervention_model_description" VARCHAR NULL DEFAULT NULL, + "design_primary_purpose" VARCHAR NULL DEFAULT NULL, + "design_masking" VARCHAR NULL DEFAULT NULL, + "design_masking_description" VARCHAR NULL DEFAULT NULL, + "design_who_masked_list" VARCHAR[] NULL DEFAULT NULL, + "phase_list" VARCHAR[] NULL DEFAULT NULL, + "enrollment_count" INTEGER NOT NULL, + "enrollment_type" VARCHAR NOT NULL, + "number_arms" INTEGER NULL DEFAULT NULL, + "design_observational_model_list" VARCHAR[] NULL DEFAULT NULL, + 
"design_time_perspective_list" VARCHAR[] NULL DEFAULT NULL, + "bio_spec_retention" VARCHAR NULL DEFAULT NULL, + "bio_spec_description" VARCHAR NULL DEFAULT NULL, + "target_duration" VARCHAR NULL DEFAULT NULL, + "number_groups_cohorts" INTEGER NULL DEFAULT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_design_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_design: -1 rows +/*!40000 ALTER TABLE "study_design" DISABLE KEYS */; +INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_intervention_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES + ('56228349-b6bb-45e3-8877-0ade34016bd9', NULL, 'Observational', NULL, NULL, NULL, NULL, NULL, '{"Billy sanders"}', NULL, 20, 'Actual', NULL, '{Cohort}', '{Retrospective}', 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000005'), + ('d1a85ba2-2f1b-4360-b60d-4d2996b57aa4', NULL, 'Interventional', NULL, NULL, NULL, NULL, NULL, '{"Billy sanders"}', NULL, 20, 'Actual', NULL, '{Cohort}', '{Retrospective}', 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000001'), + ('1258a00d-5c9a-4e8a-907f-79a39f4b21e8', NULL, 'Observational', NULL, NULL, NULL, NULL, NULL, '{"Billy sanders"}', NULL, 20, 'Actual', NULL, '{Cohort}', '{Retrospective}', 'None Retained', 'description', '5 Days', 30, 'd925991e-af73-4fa2-ab2a-7040140a57df'), + ('00000000-0000-0000-0000-000000000002', NULL, 'Interventional', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 20, 'Actual', NULL, '{Cohort}', '{Retrospective}', 'None 
Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000007'), + ('764cb716-a9fa-49b8-a9eb-7b948ff1d835', NULL, 'Interventional', NULL, NULL, NULL, NULL, NULL, '{"Billy sanders"}', NULL, 20, 'Actual', NULL, '{Cohort}', '{Retrospective}', 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000002'), + ('efa13194-2320-42d6-8348-0db63de6696b', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, 'f154f7c2-58a9-4b2e-808c-3d9a71dc99d2'), + ('a6d0f1ba-2f9d-4ce4-a0c8-c12315975d04', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, 'af2f9f5e-24eb-4b54-8fe1-ec76391b9af6'), + ('0966fc6a-0e32-4989-bd59-63956cfec02e', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, 'cb24e1c9-b4b2-451a-89b7-e73556d50ca2'), + ('a4dcaf28-8d81-4fcb-a69e-0ca664b5a84d', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, '39a913b7-daad-4c86-ba07-9f6400c73a28'), + ('8ced9a54-e88c-4f74-90c8-2d84c6401dda', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, '61ac1bdb-1809-4d20-8e29-22d3f2d85252'), + ('1f886857-2094-4f2c-8d5d-55ee1805db11', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, '626f2a7e-fa8f-459e-9076-a3b2082a00d2'), + ('f0902357-928c-47a7-8263-97985bc344be', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, 'ee3012e5-3c51-4d21-b16c-5ace1c80cf72'); +/*!40000 ALTER TABLE "study_design" ENABLE KEYS */; + +-- Dumping structure for table public.study_eligibility +CREATE TABLE IF NOT EXISTS "study_eligibility" ( + "id" CHAR(36) NOT NULL, + "gender" VARCHAR NOT NULL, + "gender_based" VARCHAR NOT NULL, + "gender_description" VARCHAR NOT NULL, + "minimum_age_value" INTEGER NOT NULL, + "maximum_age_value" INTEGER NOT NULL, + "minimum_age_unit" VARCHAR NOT NULL, + "maximum_age_unit" VARCHAR NOT NULL, + "healthy_volunteers" VARCHAR NOT NULL, + "inclusion_criteria" VARCHAR[] NOT 
NULL, + "exclusion_criteria" VARCHAR[] NOT NULL, + "study_population" VARCHAR NULL DEFAULT NULL, + "sampling_method" VARCHAR NULL DEFAULT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_eligibility_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_eligibility: 2 rows +/*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; +INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "minimum_age_value", "maximum_age_value", "minimum_age_unit", "maximum_age_unit", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id") VALUES + ('aa2de03c-d062-4e2a-93a7-4e3ac0ed2629', 'All', 'Yes', 'Description', 24, 34, 'Years', 'Years', 'selected', '{"inclusion 1"}', '{"exclusion 1"}', 'Description', 'Probability Sample', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'All', 'Yes', 'Description', 24, 34, 'Years', 'Years', 'selected', '{"inclusion 1"}', '{"exclusion 1"}', 'Description', 'Probability Sample', '00000000-0000-0000-0000-000000000001'), + ('abace007-f4d5-470d-b60e-b947c0b8a28a', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', '995d703e-a6d0-4dc2-95e7-3ce868eb9fb7'), + ('e0778c32-9097-453e-836f-4d7f0fc30d01', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', 'f154f7c2-58a9-4b2e-808c-3d9a71dc99d2'), + ('ea3f8c85-bfa1-4cf6-81d3-bde9d79bf0c2', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', 'af2f9f5e-24eb-4b54-8fe1-ec76391b9af6'), + ('5fbae118-430d-4d1a-b88b-61b2feed08fe', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', 'cb24e1c9-b4b2-451a-89b7-e73556d50ca2'), + ('a41f2a42-2372-4bbb-9aea-83298b3fc855', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', '39a913b7-daad-4c86-ba07-9f6400c73a28'), + ('23df9c4f-c939-4548-88b6-111f95bb2ed0', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', 
'61ac1bdb-1809-4d20-8e29-22d3f2d85252'), + ('b3be3fff-eeec-4c2c-91dc-9bce6dff1fc7', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', '626f2a7e-fa8f-459e-9076-a3b2082a00d2'), + ('624a91d1-980b-4bf0-b2e6-dbdd1e5ec2c2', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', 'ee3012e5-3c51-4d21-b16c-5ace1c80cf72'); +/*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; + +-- Dumping structure for table public.study_identification +CREATE TABLE IF NOT EXISTS "study_identification" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "identifier_type" VARCHAR NOT NULL, + "identifier_domain" VARCHAR NOT NULL, + "identifier_link" VARCHAR NOT NULL, + "secondary" BOOLEAN NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_identification_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_identification: -1 rows +/*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; +INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000005', 'ADF897ADS', 'NIH Grant Number', 'domain', 
'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000002'), + ('d70c6003-1a9d-4ee2-adca-3250dd1ae50a', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; + +-- Dumping structure for table public.study_intervention +CREATE TABLE IF NOT EXISTS "study_intervention" ( + "id" CHAR(36) NOT NULL, + "type" VARCHAR NOT NULL, + "name" VARCHAR NOT NULL, + "description" VARCHAR NOT NULL, + "arm_group_label_list" VARCHAR[] NOT NULL, + "other_name_list" VARCHAR[] NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_intervention_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_intervention: -1 rows +/*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; +INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; + +-- Dumping structure for table public.study_ipdsharing +CREATE TABLE IF NOT EXISTS "study_ipdsharing" ( + "id" CHAR(36) NOT NULL, + "ipd_sharing" VARCHAR NOT NULL, + "ipd_sharing_description" VARCHAR NOT NULL, + "ipd_sharing_info_type_list" VARCHAR[] NOT NULL, + "ipd_sharing_time_frame" VARCHAR NOT NULL, + "ipd_sharing_access_criteria" VARCHAR NOT NULL, + "ipd_sharing_url" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_ipdsharing_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES 
"study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_ipdsharing: -1 rows +/*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; +INSERT INTO "study_ipdsharing" ("id", "ipd_sharing", "ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Yes', 'Lorem Ipsum', '{"Study Protocol"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), + ('54ba9f80-106e-4dda-a507-cbc66a6b6b48', 'Yes', 'Lorem Ipsum', '{"Study Protocol"}', 'January 2025', 'No criteria updated', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'), + ('dd98f3a8-76e6-4cd5-9d7f-9a274dc8edff', '', '', '{}', '', '', '', 'ec0064ca-4f34-48a8-9dcc-1377c7ca0a59'), + ('02ac873b-7cc0-4725-a307-182ccbe3eadd', '', '', '{}', '', '', '', '995d703e-a6d0-4dc2-95e7-3ce868eb9fb7'), + ('45bccc13-add0-4fc4-ab85-58c94ce27247', '', '', '{}', '', '', '', 'f154f7c2-58a9-4b2e-808c-3d9a71dc99d2'), + ('b8b90e0c-058e-4c26-a587-5bb3f6782a8c', '', '', '{}', '', '', '', 'af2f9f5e-24eb-4b54-8fe1-ec76391b9af6'), + ('cd263b0d-03e8-4f2d-b6c2-7ef1944015ab', '', '', '{}', '', '', '', 'cb24e1c9-b4b2-451a-89b7-e73556d50ca2'), + ('2be4bdfa-e187-4767-a903-a6e45d349a94', '', '', '{}', '', '', '', '39a913b7-daad-4c86-ba07-9f6400c73a28'), + ('d71f1e8b-42a7-4814-a80c-27418f76b79d', '', '', '{}', '', '', '', '61ac1bdb-1809-4d20-8e29-22d3f2d85252'), + ('c98e3f78-89f8-48d4-b974-6ac1e80bc44e', '', '', '{}', '', '', '', '626f2a7e-fa8f-459e-9076-a3b2082a00d2'), + ('b37f781e-55d2-47e9-8f7e-6d1ab21ce96b', '', '', '{}', '', '', '', 'ee3012e5-3c51-4d21-b16c-5ace1c80cf72'); +/*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; + +-- Dumping structure for table public.study_link +CREATE TABLE IF NOT EXISTS "study_link" ( + "id" CHAR(36) NOT NULL, + "url" VARCHAR NOT NULL, + "title" VARCHAR NOT NULL, + "study_id" CHAR(36) 
NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_link_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_link: -1 rows +/*!40000 ALTER TABLE "study_link" DISABLE KEYS */; +INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_link" ENABLE KEYS */; + +-- Dumping structure for table public.study_location +CREATE TABLE IF NOT EXISTS "study_location" ( + "id" CHAR(36) NOT NULL, + "facility" VARCHAR NOT NULL, + "status" VARCHAR NOT NULL, + "city" VARCHAR NOT NULL, + "state" VARCHAR NOT NULL, + "zip" VARCHAR NOT NULL, + "country" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_location_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_location: -1 rows +/*!40000 ALTER TABLE "study_location" DISABLE KEYS */; +INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'facility1', 'Recruting', 'San Diego', 'CA', 
'92121', 'USA', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_location" ENABLE KEYS */; + +-- Dumping structure for table public.study_other +CREATE TABLE IF NOT EXISTS "study_other" ( + "id" CHAR(36) NOT NULL, + "oversight_has_dmc" BOOLEAN NOT NULL, + "conditions" VARCHAR[] NOT NULL, + "keywords" VARCHAR[] NOT NULL, + "size" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_other_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_other: -1 rows +/*!40000 ALTER TABLE "study_other" DISABLE KEYS */; +INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'true', '{"condition 1"}', '{"keyword 1"}', '1 GB', '00000000-0000-0000-0000-000000000001'), + ('7a1217d6-6e58-432d-b747-36e1dec81499', 'true', '{conditionupdate}', '{"keyword 1"}', '3 GB', '00000000-0000-0000-0000-000000000002'), + ('837dce97-4073-4c4a-8d65-d1b7c87f92c6', 'false', '{}', '{}', '', 'e5a2a1d2-850f-465a-8fc1-6a1aec6d9e5a'), + ('a651f9b1-3db4-4dae-a486-e9f7f7b5a5cb', 'false', '{}', '{}', '', 'ec0064ca-4f34-48a8-9dcc-1377c7ca0a59'), + ('ccd3e31d-9e45-4329-9c89-2e7c7fa0d53b', 'false', '{}', '{}', '', '995d703e-a6d0-4dc2-95e7-3ce868eb9fb7'), + ('b2176d5e-6c1e-4b4d-91a4-b271b042c88c', 'false', '{}', '{}', '', 'f154f7c2-58a9-4b2e-808c-3d9a71dc99d2'), + ('43bcce4d-92d9-4b0f-9844-1885796a295f', 'false', '{}', '{}', '', 'af2f9f5e-24eb-4b54-8fe1-ec76391b9af6'), + ('22195a93-d2cd-4a0b-80d0-f1da5bcfd215', 'false', '{}', '{}', '', 'cb24e1c9-b4b2-451a-89b7-e73556d50ca2'), + ('5fd15009-c8ff-4070-bcef-ae969713bfd4', 'false', '{}', '{}', '', '39a913b7-daad-4c86-ba07-9f6400c73a28'), + ('6f759a30-742b-4c8a-9eca-a52efdb279e6', 'false', '{}', '{}', '', '61ac1bdb-1809-4d20-8e29-22d3f2d85252'), + ('2f1197a1-bd0c-42fb-a680-46242ebf8e80', 'false', '{}', '{}', '', 
'626f2a7e-fa8f-459e-9076-a3b2082a00d2'), + ('dfa22eea-c4d7-4cdc-84b7-5023a145354e', 'false', '{}', '{}', '', 'ee3012e5-3c51-4d21-b16c-5ace1c80cf72'); +/*!40000 ALTER TABLE "study_other" ENABLE KEYS */; + +-- Dumping structure for table public.study_overall_official +CREATE TABLE IF NOT EXISTS "study_overall_official" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "affiliation" VARCHAR NOT NULL, + "role" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_overall_official_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_overall_official: -1 rows +/*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; +INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'), + ('b1683ba3-26ca-42c5-a257-1974dbbf4f8b', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'), + ('319c21f2-9441-48ec-a64c-ab839a1da2a3', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; + +-- Dumping structure for table public.study_reference +CREATE TABLE IF NOT EXISTS "study_reference" ( + "id" CHAR(36) NOT NULL, + "identifier" VARCHAR NOT NULL, + "type" VARCHAR NOT NULL, + "citation" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_reference_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- 
Dumping data for table public.study_reference: -1 rows +/*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; +INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 'Yes', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'PMID1A2234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; + +-- Dumping structure for table public.study_sponsors_collaborators +CREATE TABLE IF NOT EXISTS "study_sponsors_collaborators" ( + "id" CHAR(36) NOT NULL, + "responsible_party_type" VARCHAR NOT NULL, + "responsible_party_investigator_name" VARCHAR NOT NULL, + "responsible_party_investigator_title" VARCHAR NOT NULL, + "responsible_party_investigator_affiliation" VARCHAR NOT NULL, + "lead_sponsor_name" VARCHAR NOT NULL, + "collaborator_name" VARCHAR[] NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_sponsors_collaborators_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_sponsors_collaborators: -1 rows +/*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; +INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name", "collaborator_name", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Principal Investigator', 'Sean West', 'Title 1', 'Wyman Inc', 'Kurtis Daniel', '{UCSD}', '00000000-0000-0000-0000-000000000001'), + ('9b92a2db-e864-4a5d-8a88-91a3d8a48b74', 'Principal Investigatorup updated version', 'Sean West', 'Title 1', 'Wyman Inc', 
'Kurtis Daniel', '{"UCSD updated"}', '00000000-0000-0000-0000-000000000002'), + ('9fcde425-1d11-4d31-bfc2-edc022871876', '', '', '', '', '', '{}', 'f154f7c2-58a9-4b2e-808c-3d9a71dc99d2'), + ('cbb8562c-ac36-481d-b67c-5d69d7ae83cd', '', '', '', '', '', '{}', 'af2f9f5e-24eb-4b54-8fe1-ec76391b9af6'), + ('4d5361ab-ce94-4f94-92ed-0fe6c47b2f3a', '', '', '', '', '', '{}', 'cb24e1c9-b4b2-451a-89b7-e73556d50ca2'), + ('51e75430-25cd-42fa-953e-3724090942f5', '', '', '', '', '', '{}', '39a913b7-daad-4c86-ba07-9f6400c73a28'), + ('75f7e616-4565-44e9-99de-a7e4afeaa06d', '', '', '', '', '', '{}', '61ac1bdb-1809-4d20-8e29-22d3f2d85252'), + ('41f99b90-3a02-49e3-af56-c29074e28cef', '', '', '', '', '', '{}', '626f2a7e-fa8f-459e-9076-a3b2082a00d2'), + ('ee92b6a8-f840-4db0-9184-54a488006227', '', '', '', '', '', '{}', 'ee3012e5-3c51-4d21-b16c-5ace1c80cf72'); +/*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; + +-- Dumping structure for table public.study_status +CREATE TABLE IF NOT EXISTS "study_status" ( + "id" CHAR(36) NOT NULL, + "overall_status" VARCHAR NOT NULL, + "why_stopped" VARCHAR NOT NULL, + "start_date" TIMESTAMP NULL DEFAULT NULL, + "start_date_type" VARCHAR NOT NULL, + "completion_date" TIMESTAMP NULL DEFAULT NULL, + "completion_date_type" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_status: 2 rows +/*!40000 ALTER TABLE "study_status" DISABLE KEYS */; +INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES + ('b25cc3e6-f9b2-4c20-ba57-d82313d41df5', 'Suspended', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'Recruiting new', 'Lorem 
Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Anticipated', '00000000-0000-0000-0000-000000000001'), + ('30f39019-ff68-4b7e-b058-23ac64172e1d', '', '', NULL, '', NULL, '', 'ee3012e5-3c51-4d21-b16c-5ace1c80cf72'); +/*!40000 ALTER TABLE "study_status" ENABLE KEYS */; + +-- Dumping structure for table public.version +CREATE TABLE IF NOT EXISTS "version" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "published" BOOLEAN NOT NULL, + "changelog" VARCHAR NOT NULL, + "doi" VARCHAR NOT NULL, + "published_on" TIMESTAMP NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + "updated_on" BIGINT NOT NULL, + "created_at" BIGINT NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "version_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.version: -1 rows +/*!40000 ALTER TABLE "version" DISABLE KEYS */; +INSERT INTO "version" ("id", "title", "published", "changelog", "doi", "published_on", "dataset_id", "updated_on", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000002', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000004', 'Version 1', 'false', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003', 1693805470, 1693805470); +/*!40000 ALTER TABLE "version" ENABLE KEYS */; + +-- Dumping structure for table public.version_participants +CREATE TABLE IF NOT EXISTS "version_participants" ( + "dataset_version_id" CHAR(36) NOT NULL, + 
"participant_id" CHAR(36) NOT NULL, + PRIMARY KEY ("dataset_version_id", "participant_id"), + CONSTRAINT "version_participants_dataset_version_id_fkey" FOREIGN KEY ("dataset_version_id") REFERENCES "version" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "version_participants_participant_id_fkey" FOREIGN KEY ("participant_id") REFERENCES "participant" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.version_participants: -1 rows +/*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; +INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; + +/*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; +/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; +/*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40111 SET SQL_NOTES=IFNULL(@OLD_SQL_NOTES, 1) */; + +COMMIT; \ No newline at end of file From 1902586fb9985f3db27ef705685b7be0336f5707 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 6 Sep 2023 16:54:04 -0700 Subject: [PATCH 107/505] fix: fix study_status is created when study is initiated --- apis/__init__.py | 38 ---------------------------- model/study.py | 8 +++--- model/study_metadata/study_status.py | 8 +++--- 3 files changed, 7 insertions(+), 47 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index df047dc1..47090630 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -77,41 +77,3 @@ def get(self): api.add_namespace(dataset_api) api.add_namespace(participants_api) 
api.add_namespace(contributors_api) -# -# api.add_namespace(arm) -# api.add_namespace(available_ipd) -# api.add_namespace(contact) -# api.add_namespace(description) -# api.add_namespace(design) -# api.add_namespace(eligibility) -# api.add_namespace(identification) -# api.add_namespace(intervention) -# api.add_namespace(ipdsharing) -# api.add_namespace(link) -# api.add_namespace(location) -# api.add_namespace(other) -# api.add_namespace(overall_official) -# api.add_namespace(reference) -# api.add_namespace(sponsors_collaborator) -# api.add_namespace(status) -# -# -# api.add_namespace(access) -# api.add_namespace(funder) -# api.add_namespace(consent) -# api.add_namespace(subject) -# api.add_namespace(description) -# api.add_namespace(identifier) -# api.add_namespace(dataset_other) -# api.add_namespace(date) -# api.add_namespace(de_ident_level) -# api.add_namespace(managing_organization) -# api.add_namespace(readme) -# api.add_namespace(record_keys) -# api.add_namespace(rights) -# api.add_namespace(title) -# api.add_namespace(related_item) -# # api.add_namespace(related_item_title) -# # api.add_namespace(related_item_contributor) -# # api.add_namespace(related_item_identifier) -# # api.add_namespace(related_item_other) diff --git a/model/study.py b/model/study.py index 5d53a92f..35c3c746 100644 --- a/model/study.py +++ b/model/study.py @@ -1,10 +1,8 @@ import uuid -from datetime import datetime -import datetime - +from datetime import timezone import model from .db import db -from datetime import timezone +import datetime class Study(db.Model): @@ -14,7 +12,7 @@ def __init__(self): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() # - # self.study_status = model.StudyStatus(self) + self.study_status = model.StudyStatus(self) self.study_sponsors_collaborators = model.StudySponsorsCollaborators(self) self.study_design = model.StudyDesign(self) self.study_eligibility = model.StudyEligibility(self) diff --git 
a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index cf536d87..6e56b338 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -11,9 +11,9 @@ def __init__(self, study): self.study = study self.overall_status = "" self.why_stopped = "" - self.start_date = "" + self.start_date = None self.start_date_type = "" - self.completion_date = "" + self.completion_date = None self.completion_date_type = "" __tablename__ = "study_status" @@ -21,9 +21,9 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) overall_status = db.Column(db.String, nullable=False) why_stopped = db.Column(db.String, nullable=False) - start_date = db.Column(db.DateTime, nullable=False) + start_date = db.Column(db.DateTime, nullable=True) start_date_type = db.Column(db.String, nullable=False) - completion_date = db.Column(db.DateTime, nullable=False) + completion_date = db.Column(db.DateTime, nullable=True) completion_date_type = db.Column(db.String, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) From 242c0c43cb2ab493f57830ceba55956b0a371a53 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 6 Sep 2023 17:10:04 -0700 Subject: [PATCH 108/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20db:=20update=20?= =?UTF-8?q?init.sql?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- sql/init.sql | 100 +++++++++++++++++++++++++-------------------------- 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/sql/init.sql b/sql/init.sql index b3845c1d..5cc5457b 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -12,8 +12,8 @@ CREATE TABLE IF NOT EXISTS "study" ( "id" CHAR(36) NOT NULL, "title" VARCHAR NOT NULL, "image" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "updated_on" TIMESTAMP NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, PRIMARY KEY ("id") ); @@ -26,7 +26,7 @@ CREATE TABLE IF NOT EXISTS "user" ( 
"last_name" VARCHAR NOT NULL, "orcid" VARCHAR NOT NULL, "hash" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, + "created_at" BIGINT NOT NULL, "institution" VARCHAR NOT NULL, PRIMARY KEY ("id") ); @@ -45,7 +45,7 @@ CREATE TABLE IF NOT EXISTS "study_contributor" ( CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( "email_address" VARCHAR NOT NULL, "permission" VARCHAR NOT NULL, - "invited_on" TIMESTAMP NOT NULL, + "invited_on" BIGINT NOT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("email_address", "study_id"), CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION @@ -54,8 +54,8 @@ CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( -- Dumping structure for table public.dataset CREATE TABLE IF NOT EXISTS "dataset" ( "id" CHAR(36) NOT NULL, - "updated_on" TIMESTAMP NOT NULL, - "created_at" TIMESTAMP NOT NULL, + "updated_on" BIGINT NOT NULL, + "created_at" BIGINT NOT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "dataset_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION @@ -324,8 +324,8 @@ CREATE TABLE IF NOT EXISTS "participant" ( "last_name" VARCHAR NOT NULL, "address" VARCHAR NOT NULL, "age" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "updated_on" TIMESTAMP NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "participant_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION @@ -558,10 +558,10 @@ CREATE TABLE IF NOT EXISTS "version" ( "title" VARCHAR NOT NULL, "published" BOOLEAN NOT NULL, "changelog" VARCHAR NOT NULL, - "updated_on" TIMESTAMP NOT NULL, + "updated_on" BIGINT NOT NULL, "doi" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL, - "published_on" TIMESTAMP NOT NULL, + "created_at" BIGINT NOT NULL, + "published_on" BIGINT NOT 
NULL, "dataset_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "dataset_version_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION @@ -580,25 +580,25 @@ CREATE TABLE IF NOT EXISTS "version_participants" ( -- done /*!40000 ALTER TABLE "study" DISABLE KEYS */; INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', '2023-08-13 12:33:10', '2023-08-13 12:33:11'), - ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', '2022-08-03 12:33:10', '2023-07-03 12:33:11'), - ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', '2016-08-03 12:33:10', '2023-02-03 12:33:11'), - ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', '2020-08-03 12:33:10', '2021-09-03 12:33:11'), - ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', '2021-08-03 12:33:10', '2023-05-03 12:33:11'), - ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', '2019-08-03 12:33:10', '2022-08-03 12:33:11'), - ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', '2020-08-03 12:33:10', '2023-03-03 12:33:11'), - ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', '2023-08-03 12:33:10', '2023-01-03 12:33:11'); + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000003', 'study 3', 
'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', 1693957896, 1693957896); /*!40000 ALTER TABLE "study" ENABLE KEYS */; -- Dumping data for table public.user: -1 rows -- done /*!40000 ALTER TABLE "user" DISABLE KEYS */; INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES - ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', '2023-08-13 12:34:06', 'Schinner, Kuvalis and Beatty'), - ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', '2023-08-13 12:34:06', 'Schmitt Inc'), - ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', '2023-08-13 12:34:06', 'Stracke, Leuschke and Kuvalis'), - ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', '2023-08-13 12:34:06', 'Heidenreich, Wilkinson and Mitchell'), - ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', 
'529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', '2023-08-13 12:34:06', 'Heaney, Russel and Turner'); + ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', 1693957896, 'Schinner, Kuvalis and Beatty'), + ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', 1693957896, 'Schmitt Inc'), + ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', 1693957896, 'Stracke, Leuschke and Kuvalis'), + ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', 1693957896, 'Heidenreich, Wilkinson and Mitchell'), + ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', 1693957896, 'Heaney, Russel and Turner'); /*!40000 ALTER TABLE "user" ENABLE KEYS */; -- Dumping data for table public.study_contributor: -1 rows @@ -626,22 +626,22 @@ INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES -- done /*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES - ('Aliya_Herman@yahoo.com', 'editor', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Anastacio50@hotmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Edward0@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), - ('Jailyn17@gmail.com', 'viewer', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000002'); + ('Aliya_Herman@yahoo.com', 'editor', 1693957896, '00000000-0000-0000-0000-000000000001'), + 
('Anastacio50@hotmail.com', 'viewer', 1693957896, '00000000-0000-0000-0000-000000000001'), + ('Edward0@gmail.com', 'viewer', 1693957896, '00000000-0000-0000-0000-000000000001'), + ('Jailyn17@gmail.com', 'viewer', 1693957896, '00000000-0000-0000-0000-000000000002'); /*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; -- Dumping data for table public.dataset: -1 rows -- done /*!40000 ALTER TABLE "dataset" DISABLE KEYS */; INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000005', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000006', '2023-08-13 16:23:48', '2023-08-13 16:23:49', '00000000-0000-0000-0000-000000000003'); + ('00000000-0000-0000-0000-000000000001', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000004', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000005', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000006', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000003'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; -- Dumping 
data for table public.dataset_access: -1 rows @@ -791,18 +791,18 @@ INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES -- Dumping data for table public.participant: -1 rows /*!40000 ALTER TABLE "participant" DISABLE KEYS */; INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', '2023-08-13 16:33:53', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000004'), - ('921ba857-dd08-4149-8f5c-245c6c93ef84', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:23.627034', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('458d2c15-6ed8-4f70-a47d-70b42f2f1b86', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:36.656094', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('35750167-40c5-4f4a-9d8e-ebe89c2efcfc', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:52.555088', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:42:59.614647', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('b444520d-0eac-4065-a86d-004481f68d8a', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:45:49.495595', '2023-08-13 16:33:54', '00000000-0000-0000-0000-000000000001'), - ('88c7592a-4382-4d6b-a197-e880e49db3c0', 'aydan1', 'gasimova', '1221d kibler drive', '20', '2023-08-29 13:46:17.682171', '2023-08-29 13:46:17.682171', 
'00000000-0000-0000-0000-000000000001'), - ('ba73ed99-6ec2-46e0-acdb-4a00c31dd572', 'aydan', 'gasimova', '1221d kibler drive', '20', '2023-08-29 15:08:03.758771', '2023-08-29 15:08:03.758771', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-13 16:33:53', '2023-08-29 15:09:04.323914', '00000000-0000-0000-0000-000000000001'), - ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '2023-08-29 15:15:35.891076', '2023-08-29 15:15:35.891076', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000003'), + ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000004'), + ('921ba857-dd08-4149-8f5c-245c6c93ef84', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('458d2c15-6ed8-4f70-a47d-70b42f2f1b86', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('35750167-40c5-4f4a-9d8e-ebe89c2efcfc', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('b444520d-0eac-4065-a86d-004481f68d8a', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('88c7592a-4382-4d6b-a197-e880e49db3c0', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, 
'00000000-0000-0000-0000-000000000001'), + ('ba73ed99-6ec2-46e0-acdb-4a00c31dd572', 'aydan', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "participant" ENABLE KEYS */; -- Dumping data for table public.study_arm: -1 rows @@ -949,10 +949,10 @@ INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", -- done /*!40000 ALTER TABLE "version" DISABLE KEYS */; INSERT INTO "version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'Version 1', 'false', 'lorem ipsum', '2023-08-13 16:24:05', '2435464e643', '2023-08-13 16:23:59', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003'); + ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', 1693957896, '2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', 1693957896, 
'2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', 1693957896, '2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000004', 'Version 1', 'false', 'lorem ipsum', 1693957896, '2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000003'); /*!40000 ALTER TABLE "version" ENABLE KEYS */; -- Dumping data for table public.version_participants: -1 rows From 8f2934081b54903b4bbd29f46bd981ec6d550e2d Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 7 Sep 2023 12:00:19 -0700 Subject: [PATCH 109/505] fix: fix collabs returned as an array --- apis/study_metadata/study_sponsors_collaborators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index ff0d9157..390642a1 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -56,7 +56,7 @@ class StudyCollaboratorsResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_collaborators_ = study_.study_sponsors_collaborators.collaborator_name - return {"collaborator_name": study_collaborators_} + return study_collaborators_ def put(self, study_id: int): data = request.json From 39bd1fe618a1a48eab86691904c89e9eefa40a80 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 7 Sep 2023 12:41:24 -0700 Subject: [PATCH 110/505] fix: fix conditions returned as an array --- apis/study_metadata/study_other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 3370f458..4561388a 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -46,7 +46,7 @@ class StudyOversightResource(Resource): def get(self, study_id: int): 
study_ = Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc - return {"oversight_has_dmc": study_oversight_has_dmc} + return study_oversight_has_dmc def put(self, study_id: int): data = request.json From 543386be1614ca8349fb600f4b22335e10966267 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 7 Sep 2023 19:42:09 +0000 Subject: [PATCH 111/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 4561388a..d5c5e0c1 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -46,7 +46,7 @@ class StudyOversightResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc - return study_oversight_has_dmc + return study_oversight_has_dmc def put(self, study_id: int): data = request.json From 8c4f0b29338b2ee59e2fa2c3f064325c46801e96 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 7 Sep 2023 13:35:05 -0700 Subject: [PATCH 112/505] fix: fix arms returning array structure --- model/study_metadata/study_arm.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 12f0acfa..f9b434ba 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -25,15 +25,14 @@ def __init__(self, study): def to_dict(self): """Converts the study to a dictionary""" - return ( - { + return { "id": self.id, "label": self.label, "type": self.type, "description": str(self.description), "intervention_list": self.intervention_list, - }, - ) + } + @staticmethod def from_data(study, data): From 
13371f2eaaf2db9e403bfc518ea9f8074328816d Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 7 Sep 2023 13:39:28 -0700 Subject: [PATCH 113/505] fix: metadata/available GET, POST and DELETE endpoints changed to metadata/available-ipd --- apis/study_metadata/study_available_ipd.py | 33 +++++++++++++++------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index d197d0be..43566c8b 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import Study, db, StudyAvailableIpd from flask import request - +from flask_restx import reqparse from apis.study_metadata_namespace import api study_available = api.model( @@ -16,18 +16,30 @@ ) -@api.route("/study//metadata/available") +@api.route("/study//metadata/available-ipd") class StudyAvailableResource(Resource): @api.doc("available") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(study_available) + #@api.marshal_with(study_available) def get(self, study_id: int): study_ = Study.query.get(study_id) study_available_ipd = study_.study_available_ipd return [s.to_dict() for s in study_available_ipd] + @api.doc("update available") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(study_available) def post(self, study_id: int): + # parser = reqparse.RequestParser() + # parser.add_argument("username", type=str, required=True) + # parser.add_argument("password", type=str, required=True) + # parser.add_argument("username", type=str, required=True) + # parser.add_argument("password", type=str, required=True) + # parser.add_argument("password", type=str, required=True) + # args = parser.parse_args() + data = request.json study_obj = Study.query.get(study_id) list_of_elements = [] @@ -43,10 +55,11 @@ def post(self, study_id: int): 
db.session.commit() return list_of_elements - @api.route("/study//metadata/available_ipd/") - class StudyAvailableIpdUpdate(Resource): - def put(self, study_id: int, available_ipd_id: int): - study_available_ipd_ = StudyAvailableIpd.query.get(available_ipd_id) - db.session.delete(study_available_ipd_) - db.session.commit() - return 204 + +@api.route("/study//metadata/available-ipd/") +class StudyLocationUpdate(Resource): + def delete(self, study_id: int, available_ipd_id: int): + study_available_ = StudyAvailableIpd.query.get(available_ipd_id) + db.session.delete(study_available_) + db.session.commit() + return 204 From 6f994ddc1d5e43419304cc648f0662815b0f787f Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 7 Sep 2023 13:40:07 -0700 Subject: [PATCH 114/505] fix: fixed healthy volunteers in study design --- model/study_metadata/study_eligibility.py | 2 +- sql/init_timezones.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index c533387e..a5e1aadd 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -33,7 +33,7 @@ def __init__(self, study): maximum_age_value = db.Column(db.Integer, nullable=False) minimum_age_unit = db.Column(db.String, nullable=False) maximum_age_unit = db.Column(db.String, nullable=False) - healthy_volunteers = db.Column(db.String, nullable=False) + healthy_volunteers = db.Column(db.String, nullable=True) inclusion_criteria = db.Column(ARRAY(String), nullable=False) exclusion_criteria = db.Column(ARRAY(String), nullable=False) study_population = db.Column(db.String, nullable=True) diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 67653f89..1aa2758a 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -732,7 +732,7 @@ CREATE TABLE IF NOT EXISTS "study_eligibility" ( "maximum_age_value" INTEGER NOT NULL, "minimum_age_unit" VARCHAR NOT NULL, 
"maximum_age_unit" VARCHAR NOT NULL, - "healthy_volunteers" VARCHAR NOT NULL, + "healthy_volunteers" VARCHAR DEFAULT NULL, "inclusion_criteria" VARCHAR[] NOT NULL, "exclusion_criteria" VARCHAR[] NOT NULL, "study_population" VARCHAR NULL DEFAULT NULL, From 39a4b8bcb452ff6392b560c1c0093abb7b5ae6ba Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 7 Sep 2023 20:42:15 +0000 Subject: [PATCH 115/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_available_ipd.py | 2 +- model/study_metadata/study_arm.py | 13 ++++++------- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 43566c8b..8bcc185d 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -21,7 +21,7 @@ class StudyAvailableResource(Resource): @api.doc("available") @api.response(200, "Success") @api.response(400, "Validation Error") - #@api.marshal_with(study_available) + # @api.marshal_with(study_available) def get(self, study_id: int): study_ = Study.query.get(study_id) study_available_ipd = study_.study_available_ipd diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index f9b434ba..218e2693 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -26,13 +26,12 @@ def __init__(self, study): def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, - "label": self.label, - "type": self.type, - "description": str(self.description), - "intervention_list": self.intervention_list, - } - + "id": self.id, + "label": self.label, + "type": self.type, + "description": str(self.description), + "intervention_list": self.intervention_list, + } @staticmethod def from_data(study, data): From 
35388e8f570680108f632f8b9d5741ee11cfc81e Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 7 Sep 2023 14:14:50 -0700 Subject: [PATCH 116/505] fix: metadata/reference GET endpoints changed to string --- apis/study_metadata/study_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index fcb5e062..7a163614 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -11,7 +11,7 @@ { "id": fields.String(required=True), "identifier": fields.String(required=True), - "type": fields.Boolean(required=True), + "type": fields.String(required=True), "title": fields.String(required=True), "citation": fields.String(required=True), }, From bba04be879eb2379ea8476139988728821d6d717 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 8 Sep 2023 12:55:56 -0700 Subject: [PATCH 117/505] fix: metadata/central-contact POST endpoint from UI --- apis/study_metadata/study_contact.py | 2 +- model/study_metadata/study_contact.py | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 539c837b..05084e34 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -39,7 +39,7 @@ def post(self, study_id: int): study_contact_.update(i) list_of_elements.append(study_contact_.to_dict()) elif "id" not in i or not i["id"]: - study_contact_ = StudyContact.from_data(study_obj, i) + study_contact_ = StudyContact.from_data(study_obj, i, None, True) db.session.add(study_contact_) list_of_elements.append(study_contact_.to_dict()) db.session.commit() diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index f4dbed72..e1675db3 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -5,10 +5,11 @@ class StudyContact(db.Model): """A study is a 
collection of datasets and participants""" - def __init__(self, study): + def __init__(self, study, role, central_contact): self.id = str(uuid.uuid4()) self.study = study - + self.role = role + self.central_contact = central_contact __tablename__ = "study_contact" id = db.Column(db.CHAR(36), primary_key=True) @@ -37,9 +38,9 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study, data: dict, role, central_contact): """Creates a new study from a dictionary""" - study_contact = StudyContact(study) + study_contact = StudyContact(study, role, central_contact) study_contact.update(data) return study_contact @@ -48,11 +49,10 @@ def update(self, data): """Updates the study from a dictionary""" self.name = data["name"] self.affiliation = data["affiliation"] - self.role = data["role"] + # self.role = data["role"] self.phone = data["phone"] self.phone_ext = data["phone_ext"] self.email_address = data["email_address"] - self.central_contact = data["central_contact"] def validate(self): """Validates the lead_sponsor_last_name study""" From 10cab0b68c20836886986782da4a47ac61686255 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 8 Sep 2023 19:56:41 +0000 Subject: [PATCH 118/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/study_metadata/study_contact.py | 1 + 1 file changed, 1 insertion(+) diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index e1675db3..41e0595a 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -10,6 +10,7 @@ def __init__(self, study, role, central_contact): self.study = study self.role = role self.central_contact = central_contact + __tablename__ = "study_contact" id = db.Column(db.CHAR(36), primary_key=True) From 536bf0f8d11d02ce99c4c1ebb129d5d88571b0b9 Mon Sep 17 00:00:00 2001 
From: aydawka Date: Fri, 8 Sep 2023 13:53:17 -0700 Subject: [PATCH 119/505] fix: metadata/conditions return array --- apis/study_metadata/study_other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index d5c5e0c1..c9b4fafc 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -68,7 +68,7 @@ class StudyOversightResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_other_conditions = study_.study_other.conditions - return {"conditions": study_other_conditions} + return study_other_conditions def put(self, study_id: int): data = request.json From 581cc20a50579529dd8b7f8a720bae4d3a78620a Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 8 Sep 2023 15:33:09 -0700 Subject: [PATCH 120/505] fix: metadata/conditions PUT endpoint error --- apis/study_metadata/study_other.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index c9b4fafc..1dede7e3 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -63,7 +63,6 @@ class StudyOversightResource(Resource): @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.param("id", "The study identifier") # @api.marshal_with(study_other) def get(self, study_id: int): study_ = Study.query.get(study_id) @@ -73,6 +72,6 @@ def get(self, study_id: int): def put(self, study_id: int): data = request.json study_ = Study.query.get(study_id) - study_.study_other.conditions = data["conditions"] + study_.study_other.conditions = ["conditions"] db.session.commit() return study_.study_other.conditions From ef013498aefda378e34a7400a8ae256a1cb69834 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 8 Sep 2023 15:41:01 -0700 Subject: [PATCH 121/505] fix: metadata/identification POST method --- 
apis/study_metadata/study_identification.py | 7 ++++--- model/study_metadata/study_identification.py | 9 ++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index e21a4e0b..a3709e1a 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -35,19 +35,20 @@ def post(self, study_id: int): data = request.json study_obj = Study.query.get(study_id) primary = data["primary"] + primary["secondary"] = False if "id" in primary and primary["id"]: study_identification_ = StudyIdentification.query.get(primary["id"]) study_identification_.update(primary) elif "id" not in primary or not primary["id"]: - study_identification_ = StudyIdentification.from_data(study_obj, primary) + study_identification_ = StudyIdentification.from_data(study_obj, primary, False) db.session.add(study_identification_) - for i in data["secondary"]: + i["secondary"] = True if "id" in i and i["id"]: study_identification_ = StudyIdentification.query.get(i["id"]) study_identification_.update(i) elif "id" not in i or not i["id"]: - study_identification_ = StudyIdentification.from_data(study_obj, i) + study_identification_ = StudyIdentification.from_data(study_obj, i, True) db.session.add(study_identification_) db.session.commit() identifiers = Identifiers(study_obj) diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 89ef7b00..cd281ed6 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -3,10 +3,10 @@ class StudyIdentification(db.Model): - def __init__(self, study): + def __init__(self, study, secondary): self.id = str(uuid.uuid4()) self.study = study - + self.secondary = secondary __tablename__ = "study_identification" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) @@ -28,9 +28,9 @@ 
def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study, data: dict, secondary): """Creates a new study from a dictionary""" - study_identification = StudyIdentification(study) + study_identification = StudyIdentification(study, secondary) study_identification.update(data) return study_identification @@ -41,7 +41,6 @@ def update(self, data): self.identifier_type = data["identifier_type"] self.identifier_domain = data["identifier_domain"] self.identifier_link = data["identifier_link"] - self.secondary = data["secondary"] def validate(self): """Validates the lead_sponsor_last_name study""" From bacd6628b41a66e7a7829cefb51a78c7d1810eae Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 8 Sep 2023 22:41:43 +0000 Subject: [PATCH 122/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_identification.py | 8 ++++++-- model/study_metadata/study_identification.py | 1 + 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index a3709e1a..8c286d2e 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -40,7 +40,9 @@ def post(self, study_id: int): study_identification_ = StudyIdentification.query.get(primary["id"]) study_identification_.update(primary) elif "id" not in primary or not primary["id"]: - study_identification_ = StudyIdentification.from_data(study_obj, primary, False) + study_identification_ = StudyIdentification.from_data( + study_obj, primary, False + ) db.session.add(study_identification_) for i in data["secondary"]: i["secondary"] = True @@ -48,7 +50,9 @@ def post(self, study_id: int): study_identification_ = StudyIdentification.query.get(i["id"]) study_identification_.update(i) elif "id" not in i or not 
i["id"]: - study_identification_ = StudyIdentification.from_data(study_obj, i, True) + study_identification_ = StudyIdentification.from_data( + study_obj, i, True + ) db.session.add(study_identification_) db.session.commit() identifiers = Identifiers(study_obj) diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index cd281ed6..e94fa55c 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -7,6 +7,7 @@ def __init__(self, study, secondary): self.id = str(uuid.uuid4()) self.study = study self.secondary = secondary + __tablename__ = "study_identification" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) From 969f5a0113cc02dcac726284721125a49f5dc030 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 8 Sep 2023 17:42:41 -0700 Subject: [PATCH 123/505] fix: removed MARSHAL extra params --- apis/dataset_metadata/dataset_readme.py | 1 - apis/dataset_metadata/dataset_related_item.py | 1 - apis/dataset_metadata/dataset_related_item_contributor.py | 1 - apis/dataset_metadata/dataset_related_item_identifier.py | 1 - apis/study_metadata/study_description.py | 1 - apis/study_metadata/study_other.py | 5 ++--- 6 files changed, 2 insertions(+), 8 deletions(-) diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index 9b077ace..ee344ee9 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -16,7 +16,6 @@ class DatasetReadmeResource(Resource): @api.doc("readme") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_readme) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 748c8d69..b2bb5450 100644 --- 
a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -19,7 +19,6 @@ class DatasetRelatedItemResource(Resource): @api.doc("related_item") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_related_item) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py index 14b27580..3a0d9d0e 100644 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -21,7 +21,6 @@ class DatasetRelatedItemContributorResource(Resource): @api.doc("related_item_identifier") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") # @api.marshal_with(dataset_related_item_contributor) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py index e08b88f3..041d308c 100644 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -22,7 +22,6 @@ class DatasetRelatedItemContributorResource(Resource): @api.doc("related_item_contributor") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") # @api.marshal_with(dataset_related_item_contributor) def get(self, study_id: int, dataset_id: int): dataset_ = Dataset.query.get(dataset_id) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index cfbce6d7..9da7c40f 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -21,7 +21,6 @@ class 
StudyDescriptionResource(Resource): @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") @api.marshal_with(study_description) def get(self, study_id: int): study_ = Study.query.get(study_id) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 1dede7e3..0ded8612 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -38,10 +38,9 @@ def put(self, study_id: int): @api.route("/study//metadata/oversight") class StudyOversightResource(Resource): - @api.doc("other") + @api.doc("oversight") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.param("id", "The study identifier") # @api.marshal_with(study_other) def get(self, study_id: int): study_ = Study.query.get(study_id) @@ -60,7 +59,7 @@ def put(self, study_id: int): @api.route("/study//metadata/conditions") class StudyOversightResource(Resource): - @api.doc("other") + @api.doc("conditions") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_other) From 99567fc91fd48e3d17c8551d0fe6b4bce1ce706a Mon Sep 17 00:00:00 2001 From: aydawka Date: Sat, 9 Sep 2023 22:52:41 -0700 Subject: [PATCH 124/505] fix: metadata/other PUT function --- apis/dataset_metadata/dataset_record_keys.py | 1 - apis/study_metadata/study_other.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index af231743..e0156fd2 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -19,7 +19,6 @@ class DatasetRecordKeysResource(Resource): @api.doc("record_keys") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_record_keys) def get(self, study_id: int, dataset_id: int): dataset_ = 
Dataset.query.get(dataset_id) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 0ded8612..c3bd2500 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -71,6 +71,6 @@ def get(self, study_id: int): def put(self, study_id: int): data = request.json study_ = Study.query.get(study_id) - study_.study_other.conditions = ["conditions"] + study_.study_other.conditions = data db.session.commit() return study_.study_other.conditions From 2f6ce791394ad4970d5cd4fb6580bd91f86f1f53 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sat, 9 Sep 2023 23:16:07 -0700 Subject: [PATCH 125/505] fix: metadata/collaborator PUT method --- apis/study_metadata/study_sponsors_collaborators.py | 4 +--- model/study.py | 4 ++++ 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 390642a1..297d7715 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -61,9 +61,7 @@ def get(self, study_id: int): def put(self, study_id: int): data = request.json study_ = Study.query.get(study_id) - study_.study_sponsors_collaborators.collaborator_name = data[ - "collaborator_name" - ] + study_.study_sponsors_collaborators.collaborator_name = data db.session.commit() return study_.study_sponsors_collaborators.collaborator_name diff --git a/model/study.py b/model/study.py index 35c3c746..2be2a2d8 100644 --- a/model/study.py +++ b/model/study.py @@ -1,4 +1,5 @@ import uuid +from datetime import datetime from datetime import timezone import model from .db import db @@ -101,3 +102,6 @@ def validate(self): # if self.keywords.length < 1: # violations.push("At least one keyword must be specified") return violations + + # def touch(self): + # self.updated_on = datetime.datetime.now() From d1a04ac38bdb64c3ff99c05ac67adfee43a48b06 Mon Sep 17 00:00:00 2001 From: 
aydawka Date: Sun, 10 Sep 2023 19:58:05 -0700 Subject: [PATCH 126/505] fix: update updated_on in study table when any study metadata updated --- model/study.py | 4 ++-- model/study_metadata/study_arm.py | 2 ++ model/study_metadata/study_available_ipd.py | 1 + model/study_metadata/study_contact.py | 1 + model/study_metadata/study_description.py | 1 + model/study_metadata/study_design.py | 1 + model/study_metadata/study_eligibility.py | 1 + model/study_metadata/study_identification.py | 1 + model/study_metadata/study_intervention.py | 1 + model/study_metadata/study_ipdsharing.py | 1 + model/study_metadata/study_link.py | 1 + model/study_metadata/study_location.py | 1 + model/study_metadata/study_other.py | 1 + model/study_metadata/study_overall_official.py | 1 + model/study_metadata/study_reference.py | 1 + model/study_metadata/study_sponsors_collaborators.py | 1 + model/study_metadata/study_status.py | 1 + 17 files changed, 19 insertions(+), 2 deletions(-) diff --git a/model/study.py b/model/study.py index 2be2a2d8..b6a4ef64 100644 --- a/model/study.py +++ b/model/study.py @@ -103,5 +103,5 @@ def validate(self): # violations.push("At least one keyword must be specified") return violations - # def touch(self): - # self.updated_on = datetime.datetime.now() + def touch(self): + self.updated_on = datetime.datetime.now(timezone.utc).timestamp() diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 218e2693..d8fdebde 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -46,6 +46,8 @@ def update(self, data): self.type = data["type"] self.description = data["description"] self.intervention_list = data["intervention_list"] + self.study.touch() + def validate(self): """Validates the study""" diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 50f03bc1..1987e3d9 100644 --- a/model/study_metadata/study_available_ipd.py +++ 
b/model/study_metadata/study_available_ipd.py @@ -44,6 +44,7 @@ def update(self, data): self.type = data["type"] self.url = data["url"] self.comment = data["comment"] + self.study.touch() def validate(self): """Validates the study""" diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 41e0595a..d8f143bb 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -54,6 +54,7 @@ def update(self, data): self.phone = data["phone"] self.phone_ext = data["phone_ext"] self.email_address = data["email_address"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index 14b7fa52..cad0558e 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -40,6 +40,7 @@ def update(self, data): """Updates the study from a dictionary""" self.brief_summary = data["brief_summary"] self.detailed_description = data["detailed_description"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index ab5b5e1a..f265928d 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -110,6 +110,7 @@ def update(self, data): self.bio_spec_description = data["bio_spec_description"] self.target_duration = data["target_duration"] self.number_groups_cohorts = data["number_groups_cohorts"] + self.study.touch() def validate(self): """Validates the study""" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index a5e1aadd..05591d71 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -85,6 +85,7 @@ def update(self, data): self.exclusion_criteria = data["exclusion_criteria"] 
self.study_population = data["study_population"] self.sampling_method = data["sampling_method"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index e94fa55c..ef859828 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -42,6 +42,7 @@ def update(self, data): self.identifier_type = data["identifier_type"] self.identifier_domain = data["identifier_domain"] self.identifier_link = data["identifier_link"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 584a1b65..96a14469 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -49,6 +49,7 @@ def update(self, data): self.description = data["description"] self.arm_group_label_list = data["arm_group_label_list"] self.other_name_list = data["other_name_list"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index af9ba31f..f7d58b93 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -58,6 +58,7 @@ def update(self, data): self.ipd_sharing_time_frame = data["ipd_sharing_time_frame"] self.ipd_sharing_access_criteria = data["ipd_sharing_access_criteria"] self.ipd_sharing_url = data["ipd_sharing_url"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 5c9a4613..5a62df36 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -33,6 +33,7 @@ def update(self, data): """Updates the 
study from a dictionary""" self.url = data["url"] self.title = data["title"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 1a954383..7a5292ad 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -50,6 +50,7 @@ def update(self, data): self.state = data["state"] self.zip = data["zip"] self.country = data["country"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 3f14ff3f..ace65790 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -51,6 +51,7 @@ def update(self, data): self.conditions = data["conditions"] self.keywords = data["keywords"] self.size = data["size"] + self.study.touch() def validate(self): """Validates the study""" diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 0f056f7b..19d6db14 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -41,6 +41,7 @@ def update(self, data): self.name = data["name"] self.affiliation = data["affiliation"] self.role = data["role"] + self.study.touch() def validate(self): """Validates the study""" diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 5c8fdf1a..d4c5ea05 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -42,6 +42,7 @@ def update(self, data): self.identifier = data["identifier"] self.type = data["type"] self.citation = data["citation"] + self.study.touch() def validate(self): """Validates the study""" diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 
3b527623..b781b912 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -75,6 +75,7 @@ def from_data_(study, data: dict): def update_collaborators(self, data): """Updates the study from a dictionary""" self.collaborator_name = data["collaborator_name"] + self.study.touch() def validate(self): """Validates the lead_sponsor_last_name study""" diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 6e56b338..4e37ca0c 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -57,6 +57,7 @@ def update(self, data): self.start_date_type = data["start_date_type"] self.completion_date = data["completion_date"] self.completion_date_type = data["completion_date_type"] + self.study.touch() def validate(self): """Validates the study""" From f016d3839e78f71a8f7246665183d5fb3a4ed164 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 10 Sep 2023 22:57:22 -0700 Subject: [PATCH 127/505] fix: update updated_on in study table when any study metadata updated --- apis/study_metadata/study_other.py | 2 ++ apis/study_metadata/study_sponsors_collaborators.py | 3 +-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index c3bd2500..c8deadc7 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -53,6 +53,7 @@ def put(self, study_id: int): study_oversight = study_.study_other.oversight_has_dmc = data[ "oversight_has_dmc" ] + study_.touch() db.session.commit() return study_oversight @@ -72,5 +73,6 @@ def put(self, study_id: int): data = request.json study_ = Study.query.get(study_id) study_.study_other.conditions = data + study_.touch() db.session.commit() return study_.study_other.conditions diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 
297d7715..7a0a030e 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -32,7 +32,6 @@ class StudySponsorsResource(Resource): @api.doc("sponsors") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") @api.marshal_with(study_sponsors) def get(self, study_id: int): study_ = Study.query.get(study_id) @@ -51,7 +50,6 @@ class StudyCollaboratorsResource(Resource): @api.doc("collaborators") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") # @api.marshal_with(study_collaborators) def get(self, study_id: int): study_ = Study.query.get(study_id) @@ -62,6 +60,7 @@ def put(self, study_id: int): data = request.json study_ = Study.query.get(study_id) study_.study_sponsors_collaborators.collaborator_name = data + study_.touch() db.session.commit() return study_.study_sponsors_collaborators.collaborator_name From 01e987eb512d29bf52d995855bb174fdd41b174b Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 11 Sep 2023 06:00:47 +0000 Subject: [PATCH 128/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/study_metadata/study_arm.py | 1 - 1 file changed, 1 deletion(-) diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index d8fdebde..9095cc30 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -48,7 +48,6 @@ def update(self, data): self.intervention_list = data["intervention_list"] self.study.touch() - def validate(self): """Validates the study""" violations = [] From e7b3ee0e3f4d5d1beb91bdf1fff9f4888a803287 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 10 Sep 2023 23:06:10 -0700 Subject: [PATCH 129/505] fix: update timezone_init.sql --- sql/init_timezones.sql | 3 ++- 1 
file changed, 2 insertions(+), 1 deletion(-) diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 1aa2758a..bb5a7f0d 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -980,7 +980,8 @@ CREATE TABLE IF NOT EXISTS "study_status" ( "completion_date_type" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), - CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY +("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_status: 2 rows From 1232eee3b5ab291e4ace3e0d895f545b8649f6dd Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 11 Sep 2023 15:44:19 -0700 Subject: [PATCH 130/505] wip: schema is run if there is no table --- app.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/app.py b/app.py index f23acddc..65188979 100644 --- a/app.py +++ b/app.py @@ -1,6 +1,7 @@ """Entry point for the application.""" from flask import Flask from flask_cors import CORS +from sqlalchemy import MetaData import model from apis import api @@ -47,10 +48,14 @@ def create_app(): @app.cli.command("create-schema") def create_schema(): engine = model.db.session.get_bind() - with engine.begin() as conn: - """Create the database schema.""" - model.db.create_all() - + metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + print(table_names) + if len(table_names) == 0: + with engine.begin() as conn: + """Create the database schema.""" + model.db.create_all() @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() @@ -70,8 +75,8 @@ def destroy_schema(): if __name__ == "__main__": - from argparse import ArgumentParser + from argparse import ArgumentParser parser = ArgumentParser() parser.add_argument( "-p", "--port", 
default=5000, type=int, help="port to listen on" @@ -80,5 +85,4 @@ def destroy_schema(): port = args.port app = create_app() - app.run(host="0.0.0.0", port=port) From bcbbb6bb4ad4ccbc0b0308aca65d9142d7a77de0 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 11 Sep 2023 18:42:55 -0700 Subject: [PATCH 131/505] feat: sort arrays of m-to-m study metadata basd on created timestamp --- apis/study_metadata/study_available_ipd.py | 7 +++++-- apis/study_metadata/study_contact.py | 3 ++- apis/study_metadata/study_identification.py | 1 + apis/study_metadata/study_intervention.py | 3 ++- apis/study_metadata/study_link.py | 3 ++- apis/study_metadata/study_location.py | 3 ++- apis/study_metadata/study_overall_official.py | 6 ++++-- apis/study_metadata/study_reference.py | 4 +++- model/study_metadata/arm.py | 3 ++- model/study_metadata/identifiers.py | 17 +++++++---------- model/study_metadata/study_arm.py | 7 ++++++- model/study_metadata/study_available_ipd.py | 6 ++++++ model/study_metadata/study_contact.py | 8 +++++++- model/study_metadata/study_identification.py | 6 ++++++ model/study_metadata/study_intervention.py | 7 ++++++- model/study_metadata/study_link.py | 15 +++++++++++++-- model/study_metadata/study_location.py | 6 ++++++ model/study_metadata/study_overall_official.py | 5 +++++ model/study_metadata/study_reference.py | 6 ++++++ 19 files changed, 91 insertions(+), 25 deletions(-) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 8bcc185d..9dabfd84 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -24,8 +24,11 @@ class StudyAvailableResource(Resource): # @api.marshal_with(study_available) def get(self, study_id: int): study_ = Study.query.get(study_id) - study_available_ipd = study_.study_available_ipd - return [s.to_dict() for s in study_available_ipd] + study_available_ipd_ = study_.study_available_ipd + sorted_study_available_ipd = 
sorted(study_available_ipd_, key=lambda x: x.created_at, reverse=True) + return [s.to_dict() for s in sorted_study_available_ipd] + + @api.doc("update available") @api.response(200, "Success") diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 05084e34..8251978c 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -27,7 +27,8 @@ class StudyContactResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_contact_ = study_.study_contact - return [s.to_dict() for s in study_contact_ if s.central_contact] + sorted_study_contact = sorted(study_contact_, key=lambda x: x.created_at, reverse=True) + return [s.to_dict() for s in sorted_study_contact if s.central_contact] def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 8c286d2e..7a270fc0 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -31,6 +31,7 @@ def get(self, study_id: int): identifiers = Identifiers(study_) return identifiers.to_dict() + def post(self, study_id: int): data = request.json study_obj = Study.query.get(study_id) diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index c0d0e8e7..80b657f4 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -29,7 +29,8 @@ class StudyInterventionResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_intervention_ = study_.study_intervention - return [s.to_dict() for s in study_intervention_] + sorted_study_intervention = sorted(study_intervention_, key=lambda x: x.created_at, reverse=True) + return [s.to_dict() for s in sorted_study_intervention] def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_link.py 
b/apis/study_metadata/study_link.py index b30bb382..0f866601 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -26,7 +26,8 @@ class StudyLinkResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_link_ = study_.study_link - return [s.to_dict() for s in study_link_] + sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at, reverse=True) + return [s.to_dict() for s in sorted_study_link_] def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index c53b6f02..3ceaf8b3 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -30,7 +30,8 @@ class StudyLocationResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_location_ = study_.study_location - return [s.to_dict() for s in study_location_] + sorted_study_location = sorted(study_location_, key=lambda x: x.created_at, reverse=True) + return [s.to_dict() for s in sorted_study_location] def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index f63a071f..e4994f03 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -23,11 +23,13 @@ class StudyOverallOfficialResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - @api.marshal_with(study_overall_official) + #@api.marshal_with(study_overall_official) def get(self, study_id: int): study_ = Study.query.get(study_id) study_overall_official_ = study_.study_overall_official - return [s.to_dict() for s in study_overall_official_] + # sorted_by_date = sorted([i.created_at for i in study_overall_official_], reverse=True) + sorted_study_overall = sorted(study_overall_official_, key=lambda x: 
x.created_at, reverse=True) + return [i.to_dict() for i in sorted_study_overall] def post(self, study_id: int): data = request.json diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 7a163614..d57b2249 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -28,7 +28,9 @@ class StudyReferenceResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_reference_ = study_.study_reference - return [s.to_dict() for s in study_reference_] + print(study_.study_reference) + sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at, reverse=True) + return [s.to_dict() for s in sorted_study_reference] def post(self, study_id: int): data = request.json diff --git a/model/study_metadata/arm.py b/model/study_metadata/arm.py index ef84cb8e..ee3a2d86 100644 --- a/model/study_metadata/arm.py +++ b/model/study_metadata/arm.py @@ -8,7 +8,8 @@ def __init__(self, study: Study): study: Study def to_dict(self): + sorted_study_arms = sorted(self.study.study_arm, key=lambda arm: arm.created_at, reverse=True) return { - "arms": [arm.to_dict() for arm in self.study.study_arm], + "arms": [arm.to_dict() for arm in sorted_study_arms], "study_type": self.study.study_design.study_type, } diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 5ae8fbf6..18d345be 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -7,16 +7,13 @@ def __init__(self, study: Study): study: Study + def to_dict(self): + sorted_study_identifications = sorted(self.study.study_identification, key=lambda identifier: identifier.created_at , reverse=True) return { - "primary": [ - identifier - for identifier in self.study.study_identification - if not identifier.secondary - ][0].to_dict(), - "secondary": [ - identifier.to_dict() - for identifier in self.study.study_identification - if identifier.secondary - 
], + "primary": [identifier for identifier in sorted_study_identifications if not identifier.secondary][0].to_dict(), + "secondary": [identifier.to_dict() for identifier in sorted_study_identifications if identifier.secondary], } + # sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at, reverse=True) + # return [s.to_dict() for s in sorted_study_reference] + diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 9095cc30..d1c1716b 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -3,7 +3,8 @@ from sqlalchemy.dialects.postgresql import ARRAY from ..db import db - +from datetime import timezone +import datetime class StudyArm(db.Model): """A study is a collection of datasets and participants""" @@ -11,6 +12,7 @@ class StudyArm(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_arm" @@ -19,6 +21,7 @@ def __init__(self, study): type = db.Column(db.String, nullable=False) description = db.Column(db.String, nullable=False) intervention_list = db.Column(ARRAY(String), nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_arm") @@ -31,6 +34,8 @@ def to_dict(self): "type": self.type, "description": str(self.description), "intervention_list": self.intervention_list, + "created_at": self.created_at + } @staticmethod diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 1987e3d9..bf7405cc 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -1,6 +1,8 @@ import uuid from ..db import db +from datetime import timezone +import datetime class StudyAvailableIpd(db.Model): @@ -9,6 +11,7 @@ class StudyAvailableIpd(db.Model): 
def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_available_ipd" @@ -17,6 +20,7 @@ def __init__(self, study): type = db.Column(db.String, nullable=False) url = db.Column(db.String, nullable=False) comment = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_available_ipd") @@ -29,6 +33,8 @@ def to_dict(self): "type": self.type, "url": self.url, "comment": self.comment, + "created_at": self.created_at + } @staticmethod diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index d8f143bb..0607866b 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -1,6 +1,7 @@ import uuid from ..db import db - +from datetime import timezone +import datetime class StudyContact(db.Model): """A study is a collection of datasets and participants""" @@ -10,6 +11,8 @@ def __init__(self, study, role, central_contact): self.study = study self.role = role self.central_contact = central_contact + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + __tablename__ = "study_contact" @@ -21,6 +24,7 @@ def __init__(self, study, role, central_contact): phone_ext = db.Column(db.String, nullable=False) email_address = db.Column(db.String, nullable=False) central_contact = db.Column(db.BOOLEAN, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_contact") @@ -36,6 +40,8 @@ def to_dict(self): "phone_ext": self.phone_ext, "email_address": self.email_address, "central_contact": self.central_contact, + "created_at": self.created_at + } @staticmethod diff --git a/model/study_metadata/study_identification.py 
b/model/study_metadata/study_identification.py index ef859828..fa255790 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -1,5 +1,7 @@ import uuid from ..db import db +from datetime import timezone +import datetime class StudyIdentification(db.Model): @@ -7,6 +9,7 @@ def __init__(self, study, secondary): self.id = str(uuid.uuid4()) self.study = study self.secondary = secondary + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_identification" id = db.Column(db.CHAR(36), primary_key=True) @@ -15,6 +18,7 @@ def __init__(self, study, secondary): identifier_domain = db.Column(db.String, nullable=False) identifier_link = db.Column(db.String, nullable=False) secondary = db.Column(db.BOOLEAN, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_identification") @@ -26,6 +30,8 @@ def to_dict(self): "identifier_type": self.identifier_type, "identifier_domain": self.identifier_domain, "identifier_link": self.identifier_link, + "created_at": self.created_at + } @staticmethod diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 96a14469..525e3b5d 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -2,7 +2,8 @@ from ..db import db from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY - +from datetime import timezone +import datetime class StudyIntervention(db.Model): """A study is a collection of datasets and participants""" @@ -10,6 +11,7 @@ class StudyIntervention(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_intervention" @@ -19,6 +21,7 @@ def __init__(self, study): description = 
db.Column(db.String, nullable=False) arm_group_label_list = db.Column(ARRAY(String), nullable=False) other_name_list = db.Column(ARRAY(String), nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_intervention") @@ -32,6 +35,8 @@ def to_dict(self): "description": self.description, "arm_group_label_list": self.arm_group_label_list, "other_name_list": self.other_name_list, + "created_at": self.created_at + } @staticmethod diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 5a62df36..7d756d97 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -1,6 +1,7 @@ import uuid from ..db import db - +from datetime import timezone +import datetime class StudyLink(db.Model): """A study is a collection of datasets and participants""" @@ -8,25 +9,35 @@ class StudyLink(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + __tablename__ = "study_link" id = db.Column(db.CHAR(36), primary_key=True) url = db.Column(db.String, nullable=False) title = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_link") def to_dict(self): """Converts the study to a dictionary""" - return {"id": self.id, "url": self.url, "title": self.title} + return { + "id": self.id, + "url": self.url, + "title": self.title, + "created_at": self.created_at + + } @staticmethod def from_data(study, data: dict): """Creates a new study from a dictionary""" study_link = StudyLink(study) study_link.update(data) + return study_link def update(self, data): diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 
7a5292ad..17831d85 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -1,5 +1,7 @@ import uuid from ..db import db +from datetime import timezone +import datetime class StudyLocation(db.Model): @@ -8,6 +10,7 @@ class StudyLocation(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_location" @@ -18,6 +21,7 @@ def __init__(self, study): state = db.Column(db.String, nullable=False) zip = db.Column(db.String, nullable=False) country = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_location") @@ -32,6 +36,8 @@ def to_dict(self): "state": self.state, "zip": self.zip, "country": self.country, + "created_at": self.created_at + } @staticmethod diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 19d6db14..49aa2c51 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -1,5 +1,7 @@ import uuid from ..db import db +from datetime import timezone +import datetime class StudyOverallOfficial(db.Model): @@ -8,6 +10,7 @@ class StudyOverallOfficial(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_overall_official" @@ -15,6 +18,7 @@ def __init__(self, study): name = db.Column(db.String, nullable=False) affiliation = db.Column(db.String, nullable=False) role = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_overall_official") @@ -26,6 +30,7 @@ def 
to_dict(self): "name": self.name, "affiliation": self.affiliation, "role": self.role, + "created_at": self.created_at } @staticmethod diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index d4c5ea05..8e223669 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -1,6 +1,8 @@ import uuid from ..db import db +from datetime import timezone +import datetime class StudyReference(db.Model): @@ -9,6 +11,7 @@ class StudyReference(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_reference" @@ -16,6 +19,7 @@ def __init__(self, study): identifier = db.Column(db.String, nullable=False) type = db.Column(db.String, nullable=False) citation = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_reference") @@ -27,6 +31,8 @@ def to_dict(self): "identifier": self.identifier, "type": self.type, "citation": self.citation, + "created_at": self.created_at + } @staticmethod From 23b94f83fbb9e20d9c747b11b64b946c719669cf Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 12 Sep 2023 01:43:30 +0000 Subject: [PATCH 132/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_available_ipd.py | 6 +++--- apis/study_metadata/study_contact.py | 4 +++- apis/study_metadata/study_identification.py | 1 - apis/study_metadata/study_intervention.py | 4 +++- apis/study_metadata/study_link.py | 4 +++- apis/study_metadata/study_location.py | 4 +++- apis/study_metadata/study_overall_official.py | 6 ++++-- apis/study_metadata/study_reference.py | 4 +++- app.py | 3 ++- 
model/study_metadata/arm.py | 4 +++- model/study_metadata/identifiers.py | 20 ++++++++++++++----- model/study_metadata/study_arm.py | 4 ++-- model/study_metadata/study_available_ipd.py | 3 +-- model/study_metadata/study_contact.py | 5 ++--- model/study_metadata/study_identification.py | 3 +-- model/study_metadata/study_intervention.py | 4 ++-- model/study_metadata/study_link.py | 5 ++--- model/study_metadata/study_location.py | 3 +-- .../study_metadata/study_overall_official.py | 2 +- model/study_metadata/study_reference.py | 3 +-- 20 files changed, 55 insertions(+), 37 deletions(-) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 9dabfd84..6f252e3f 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -25,11 +25,11 @@ class StudyAvailableResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_available_ipd_ = study_.study_available_ipd - sorted_study_available_ipd = sorted(study_available_ipd_, key=lambda x: x.created_at, reverse=True) + sorted_study_available_ipd = sorted( + study_available_ipd_, key=lambda x: x.created_at, reverse=True + ) return [s.to_dict() for s in sorted_study_available_ipd] - - @api.doc("update available") @api.response(200, "Success") @api.response(400, "Validation Error") diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 8251978c..e9e8c6ab 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -27,7 +27,9 @@ class StudyContactResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_contact_ = study_.study_contact - sorted_study_contact = sorted(study_contact_, key=lambda x: x.created_at, reverse=True) + sorted_study_contact = sorted( + study_contact_, key=lambda x: x.created_at, reverse=True + ) return [s.to_dict() for s in sorted_study_contact if s.central_contact] def 
post(self, study_id: int): diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 7a270fc0..8c286d2e 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -31,7 +31,6 @@ def get(self, study_id: int): identifiers = Identifiers(study_) return identifiers.to_dict() - def post(self, study_id: int): data = request.json study_obj = Study.query.get(study_id) diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 80b657f4..e1c43136 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -29,7 +29,9 @@ class StudyInterventionResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_intervention_ = study_.study_intervention - sorted_study_intervention = sorted(study_intervention_, key=lambda x: x.created_at, reverse=True) + sorted_study_intervention = sorted( + study_intervention_, key=lambda x: x.created_at, reverse=True + ) return [s.to_dict() for s in sorted_study_intervention] def post(self, study_id: int): diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 0f866601..c37c4525 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -26,7 +26,9 @@ class StudyLinkResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_link_ = study_.study_link - sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at, reverse=True) + sorted_study_link_ = sorted( + study_link_, key=lambda x: x.created_at, reverse=True + ) return [s.to_dict() for s in sorted_study_link_] def post(self, study_id: int): diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 3ceaf8b3..87e8e824 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -30,7 +30,9 @@ class 
StudyLocationResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_location_ = study_.study_location - sorted_study_location = sorted(study_location_, key=lambda x: x.created_at, reverse=True) + sorted_study_location = sorted( + study_location_, key=lambda x: x.created_at, reverse=True + ) return [s.to_dict() for s in sorted_study_location] def post(self, study_id: int): diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index e4994f03..81095f60 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -23,12 +23,14 @@ class StudyOverallOfficialResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - #@api.marshal_with(study_overall_official) + # @api.marshal_with(study_overall_official) def get(self, study_id: int): study_ = Study.query.get(study_id) study_overall_official_ = study_.study_overall_official # sorted_by_date = sorted([i.created_at for i in study_overall_official_], reverse=True) - sorted_study_overall = sorted(study_overall_official_, key=lambda x: x.created_at, reverse=True) + sorted_study_overall = sorted( + study_overall_official_, key=lambda x: x.created_at, reverse=True + ) return [i.to_dict() for i in sorted_study_overall] def post(self, study_id: int): diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index d57b2249..49b1b386 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -29,7 +29,9 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_reference_ = study_.study_reference print(study_.study_reference) - sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at, reverse=True) + sorted_study_reference = sorted( + study_reference_, key=lambda x: x.created_at, reverse=True + ) return 
[s.to_dict() for s in sorted_study_reference] def post(self, study_id: int): diff --git a/app.py b/app.py index 65188979..50b210d3 100644 --- a/app.py +++ b/app.py @@ -56,6 +56,7 @@ def create_schema(): with engine.begin() as conn: """Create the database schema.""" model.db.create_all() + @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() @@ -75,8 +76,8 @@ def destroy_schema(): if __name__ == "__main__": - from argparse import ArgumentParser + parser = ArgumentParser() parser.add_argument( "-p", "--port", default=5000, type=int, help="port to listen on" diff --git a/model/study_metadata/arm.py b/model/study_metadata/arm.py index ee3a2d86..8b4e0c97 100644 --- a/model/study_metadata/arm.py +++ b/model/study_metadata/arm.py @@ -8,7 +8,9 @@ def __init__(self, study: Study): study: Study def to_dict(self): - sorted_study_arms = sorted(self.study.study_arm, key=lambda arm: arm.created_at, reverse=True) + sorted_study_arms = sorted( + self.study.study_arm, key=lambda arm: arm.created_at, reverse=True + ) return { "arms": [arm.to_dict() for arm in sorted_study_arms], "study_type": self.study.study_design.study_type, diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 18d345be..07482c8b 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -7,13 +7,23 @@ def __init__(self, study: Study): study: Study - def to_dict(self): - sorted_study_identifications = sorted(self.study.study_identification, key=lambda identifier: identifier.created_at , reverse=True) + sorted_study_identifications = sorted( + self.study.study_identification, + key=lambda identifier: identifier.created_at, + reverse=True, + ) return { - "primary": [identifier for identifier in sorted_study_identifications if not identifier.secondary][0].to_dict(), - "secondary": [identifier.to_dict() for identifier in sorted_study_identifications if identifier.secondary], + "primary": [ + identifier + 
for identifier in sorted_study_identifications + if not identifier.secondary + ][0].to_dict(), + "secondary": [ + identifier.to_dict() + for identifier in sorted_study_identifications + if identifier.secondary + ], } # sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at, reverse=True) # return [s.to_dict() for s in sorted_study_reference] - diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index d1c1716b..49a91a24 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -6,6 +6,7 @@ from datetime import timezone import datetime + class StudyArm(db.Model): """A study is a collection of datasets and participants""" @@ -34,8 +35,7 @@ def to_dict(self): "type": self.type, "description": str(self.description), "intervention_list": self.intervention_list, - "created_at": self.created_at - + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index bf7405cc..d955ae3b 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -33,8 +33,7 @@ def to_dict(self): "type": self.type, "url": self.url, "comment": self.comment, - "created_at": self.created_at - + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 0607866b..bdcc507d 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -3,6 +3,7 @@ from datetime import timezone import datetime + class StudyContact(db.Model): """A study is a collection of datasets and participants""" @@ -13,7 +14,6 @@ def __init__(self, study, role, central_contact): self.central_contact = central_contact self.created_at = datetime.datetime.now(timezone.utc).timestamp() - __tablename__ = "study_contact" id = db.Column(db.CHAR(36), primary_key=True) @@ -40,8 +40,7 @@ def 
to_dict(self): "phone_ext": self.phone_ext, "email_address": self.email_address, "central_contact": self.central_contact, - "created_at": self.created_at - + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index fa255790..67485de2 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -30,8 +30,7 @@ def to_dict(self): "identifier_type": self.identifier_type, "identifier_domain": self.identifier_domain, "identifier_link": self.identifier_link, - "created_at": self.created_at - + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 525e3b5d..68569a68 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -5,6 +5,7 @@ from datetime import timezone import datetime + class StudyIntervention(db.Model): """A study is a collection of datasets and participants""" @@ -35,8 +36,7 @@ def to_dict(self): "description": self.description, "arm_group_label_list": self.arm_group_label_list, "other_name_list": self.other_name_list, - "created_at": self.created_at - + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 7d756d97..bf46e638 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -3,6 +3,7 @@ from datetime import timezone import datetime + class StudyLink(db.Model): """A study is a collection of datasets and participants""" @@ -11,7 +12,6 @@ def __init__(self, study): self.study = study self.created_at = datetime.datetime.now(timezone.utc).timestamp() - __tablename__ = "study_link" id = db.Column(db.CHAR(36), primary_key=True) @@ -28,8 +28,7 @@ def to_dict(self): "id": self.id, "url": self.url, "title": self.title, - "created_at": 
self.created_at - + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 17831d85..925980cd 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -36,8 +36,7 @@ def to_dict(self): "state": self.state, "zip": self.zip, "country": self.country, - "created_at": self.created_at - + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 49aa2c51..f3df7f1d 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -30,7 +30,7 @@ def to_dict(self): "name": self.name, "affiliation": self.affiliation, "role": self.role, - "created_at": self.created_at + "created_at": self.created_at, } @staticmethod diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 8e223669..8804420a 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -31,8 +31,7 @@ def to_dict(self): "identifier": self.identifier, "type": self.type, "citation": self.citation, - "created_at": self.created_at - + "created_at": self.created_at, } @staticmethod From 09daabf878e69faed4a1aa4000527e7aa3e22f3a Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 12 Sep 2023 13:26:55 -0700 Subject: [PATCH 133/505] feat: added logic to create schema if tables are empty --- app.py | 41 +- db-docker-compose.yaml | 4 +- sql/init.sql | 919 ++++++++++++++++++++--------------------- sql/init_timezones.sql | 115 +++--- 4 files changed, 528 insertions(+), 551 deletions(-) diff --git a/app.py b/app.py index 65188979..83b6cb2b 100644 --- a/app.py +++ b/app.py @@ -44,9 +44,26 @@ def create_app(): api.init_app(app) CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) + # + # @app.cli.command("create-schema") + # def create_schema(): 
+ # engine = model.db.session.get_bind() + # metadata = MetaData() + # metadata.reflect(bind=engine) + # table_names = [table.name for table in metadata.tables.values()] + # print(table_names) + # if len(table_names) == 0: + # with engine.begin() as conn: + # """Create the database schema.""" + # model.db.create_all() - @app.cli.command("create-schema") - def create_schema(): + @app.cli.command("destroy-schema") + def destroy_schema(): + engine = model.db.session.get_bind() + with engine.begin() as conn: + """Create the database schema.""" + model.db.drop_all() + with app.app_context(): engine = model.db.session.get_bind() metadata = MetaData() metadata.reflect(bind=engine) @@ -56,31 +73,13 @@ def create_schema(): with engine.begin() as conn: """Create the database schema.""" model.db.create_all() - @app.cli.command("destroy-schema") - def destroy_schema(): - engine = model.db.session.get_bind() - with engine.begin() as conn: - """Create the database schema.""" - model.db.drop_all() - - # - # @api.route("/") - # @api.doc(responses={404: "not found"}) - # class Home(Resource): - # def home(self): - # return "Home page" - # - return app if __name__ == "__main__": - from argparse import ArgumentParser parser = ArgumentParser() - parser.add_argument( - "-p", "--port", default=5000, type=int, help="port to listen on" - ) + parser.add_argument("-p", "--port", default=5000, type=int, help="port to listen on") args = parser.parse_args() port = args.port diff --git a/db-docker-compose.yaml b/db-docker-compose.yaml index 55588f95..98ee777f 100644 --- a/db-docker-compose.yaml +++ b/db-docker-compose.yaml @@ -10,8 +10,8 @@ services: ports: - 5432:5432 volumes: - - ./postgres-data:/var/lib/postgresql/data - - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql +# - ./postgres-data:/var/lib/postgresql/data +# - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql # pgadmin: # image: dpage/pgadmin4 # restart: always diff --git a/sql/init.sql b/sql/init.sql index 
5cc5457b..bd8d6c79 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -1,66 +1,37 @@ -- -------------------------------------------------------- --- Host: 7hg.h.filess.io --- Server version: PostgreSQL 14.4 on x86_64-pc-linux-musl, compiled by gcc (Alpine 11.2.1_git20220219) 11.2.1 20220219, 64-bit --- Server OS: +-- Host: 127.0.0.1 +-- Server version: PostgreSQL 15.4 (Debian 15.4-1.pgdg120+1) on x86_64-pc-linux-gnu, compiled by gcc (Debian 12.2.0-14) 12.2.0, 64-bit +-- Server OS: -- HeidiSQL Version: 12.3.0.6589 -- -------------------------------------------------------- -BEGIN; - --- Dumping structure for table public.study -CREATE TABLE IF NOT EXISTS "study" ( - "id" CHAR(36) NOT NULL, - "title" VARCHAR NOT NULL, - "image" VARCHAR NOT NULL, - "created_at" BIGINT NOT NULL, - "updated_on" BIGINT NOT NULL, - PRIMARY KEY ("id") -); - --- Dumping structure for table public.user -CREATE TABLE IF NOT EXISTS "user" ( - "id" CHAR(36) NOT NULL, - "email_address" VARCHAR NOT NULL, - "username" VARCHAR NOT NULL, - "first_name" VARCHAR NOT NULL, - "last_name" VARCHAR NOT NULL, - "orcid" VARCHAR NOT NULL, - "hash" VARCHAR NOT NULL, - "created_at" BIGINT NOT NULL, - "institution" VARCHAR NOT NULL, - PRIMARY KEY ("id") -); - --- Dumping structure for table public.study_contributor -CREATE TABLE IF NOT EXISTS "study_contributor" ( - "permission" VARCHAR NOT NULL, - "user_id" CHAR(36) NOT NULL, - "study_id" CHAR(36) NOT NULL, - PRIMARY KEY ("user_id", "study_id"), - CONSTRAINT "study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, - CONSTRAINT "study_contributor_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping structure for table public.invited_study_contributor -CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( - "email_address" VARCHAR NOT NULL, - "permission" VARCHAR NOT NULL, - "invited_on" BIGINT NOT NULL, - "study_id" CHAR(36) 
NOT NULL, - PRIMARY KEY ("email_address", "study_id"), - CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET NAMES */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; -- Dumping structure for table public.dataset CREATE TABLE IF NOT EXISTS "dataset" ( "id" CHAR(36) NOT NULL, + "study_id" CHAR(36) NOT NULL, "updated_on" BIGINT NOT NULL, "created_at" BIGINT NOT NULL, - "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "dataset_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset: 4 rows +/*!40000 ALTER TABLE "dataset" DISABLE KEYS */; +INSERT INTO "dataset" ("id", "study_id", "updated_on", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001', 1693957896, 1693957896), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001', 1693957896, 1693957896), + ('c8b5eb7a-f939-44a3-86c1-7746e73329c4', '00000000-0000-0000-0000-000000000001', 1694031716, 1694031716); +/*!40000 ALTER TABLE "dataset" ENABLE KEYS */; + -- Dumping structure for table public.dataset_access CREATE TABLE IF NOT EXISTS "dataset_access" ( "id" CHAR(36) NOT NULL, @@ -73,6 +44,13 @@ CREATE TABLE IF NOT EXISTS "dataset_access" ( CONSTRAINT "dataset_access_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table 
public.dataset_access: 2 rows +/*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; +INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000002'); +/*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; + -- Dumping structure for table public.dataset_alternate_identifier CREATE TABLE IF NOT EXISTS "dataset_alternate_identifier" ( "id" CHAR(36) NOT NULL, @@ -80,9 +58,17 @@ CREATE TABLE IF NOT EXISTS "dataset_alternate_identifier" ( "identifier_type" VARCHAR NOT NULL, "dataset_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), - CONSTRAINT "dataset_identifier_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "dataset_alternate_identifier_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_alternate_identifier: 3 rows +/*!40000 ALTER TABLE "dataset_alternate_identifier" DISABLE KEYS */; +INSERT INTO "dataset_alternate_identifier" ("id", "identifier", "identifier_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'), + ('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', '00000000-0000-0000-0000-000000000001'), + ('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_alternate_identifier" ENABLE KEYS */; + -- Dumping structure for table public.dataset_consent CREATE TABLE IF NOT EXISTS "dataset_consent" ( "id" CHAR(36) NOT NULL, @@ -98,6 +84,13 @@ CREATE TABLE IF NOT EXISTS "dataset_consent" ( CONSTRAINT "dataset_consent_dataset_id_fkey" FOREIGN KEY ("dataset_id") 
REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_consent: 2 rows +/*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */; +INSERT INTO "dataset_consent" ("id", "type", "noncommercial", "geog_restrict", "research_type", "genetic_only", "no_methods", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'), + ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; + -- Dumping structure for table public.dataset_contributor CREATE TABLE IF NOT EXISTS "dataset_contributor" ( "id" CHAR(36) NOT NULL, @@ -114,6 +107,12 @@ CREATE TABLE IF NOT EXISTS "dataset_contributor" ( CONSTRAINT "dataset_contributor_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_contributor: 1 rows +/*!40000 ALTER TABLE "dataset_contributor" DISABLE KEYS */; +INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_contributor" ENABLE KEYS */; + -- Dumping structure for table public.dataset_contributor_affiliation CREATE TABLE IF NOT EXISTS "dataset_contributor_affiliation" ( "id" CHAR(36) NOT NULL, @@ -125,6 +124,10 @@ CREATE TABLE IF NOT EXISTS "dataset_contributor_affiliation" ( CONSTRAINT "dataset_contributor_affiliation_dataset_contributor_id_fkey" FOREIGN KEY ("dataset_contributor_id") REFERENCES "dataset_contributor" ("id") ON 
UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_contributor_affiliation: 0 rows +/*!40000 ALTER TABLE "dataset_contributor_affiliation" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_contributor_affiliation" ENABLE KEYS */; + -- Dumping structure for table public.dataset_date CREATE TABLE IF NOT EXISTS "dataset_date" ( "id" CHAR(36) NOT NULL, @@ -136,6 +139,12 @@ CREATE TABLE IF NOT EXISTS "dataset_date" ( CONSTRAINT "dataset_date_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_date: 1 rows +/*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; +INSERT INTO "dataset_date" ("id", "date", "date_type", "data_information", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_date" ENABLE KEYS */; + -- Dumping structure for table public.dataset_description CREATE TABLE IF NOT EXISTS "dataset_description" ( "id" CHAR(36) NOT NULL, @@ -146,6 +155,12 @@ CREATE TABLE IF NOT EXISTS "dataset_description" ( CONSTRAINT "dataset_description_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_description: 1 rows +/*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; +INSERT INTO "dataset_description" ("id", "description", "description_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; + -- Dumping structure for table public.dataset_de_ident_level CREATE TABLE IF NOT EXISTS "dataset_de_ident_level" ( "id" CHAR(36) NOT NULL, @@ -161,6 +176,14 @@ CREATE TABLE IF NOT EXISTS "dataset_de_ident_level" ( CONSTRAINT "dataset_de_ident_level_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" 
("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_de_ident_level: 3 rows +/*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; +INSERT INTO "dataset_de_ident_level" ("id", "type", "direct", "hipaa", "dates", "nonarr", "k_anon", "details", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', '00000000-0000-0000-0000-000000000001'), + ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'), + ('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; + -- Dumping structure for table public.dataset_funder CREATE TABLE IF NOT EXISTS "dataset_funder" ( "id" CHAR(36) NOT NULL, @@ -176,6 +199,12 @@ CREATE TABLE IF NOT EXISTS "dataset_funder" ( CONSTRAINT "dataset_funder_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_funder: 1 rows +/*!40000 ALTER TABLE "dataset_funder" DISABLE KEYS */; +INSERT INTO "dataset_funder" ("id", "name", "identifier", "identifier_type", "identifier_scheme_uri", "award_number", "award_uri", "award_title", "dataset_id") VALUES + ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; + -- Dumping structure for table public.dataset_managing_organization CREATE TABLE IF NOT EXISTS "dataset_managing_organization" ( "id" CHAR(36) NOT NULL, @@ -186,13 +215,20 @@ CREATE TABLE IF NOT EXISTS "dataset_managing_organization" ( CONSTRAINT "dataset_managing_organization_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table 
public.dataset_managing_organization: 2 rows +/*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; +INSERT INTO "dataset_managing_organization" ("id", "name", "ror_id", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', '00000000-0000-0000-0000-000000000001'), + ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', '354grhji5', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; + -- Dumping structure for table public.dataset_other CREATE TABLE IF NOT EXISTS "dataset_other" ( "id" CHAR(36) NOT NULL, "language" VARCHAR NOT NULL, "managing_organization_name" VARCHAR NOT NULL, "managing_organization_ror_id" VARCHAR NOT NULL, - "size" VARCHAR NOT NULL, + "size" UNKNOWN NOT NULL, "standards_followed" VARCHAR NOT NULL, "acknowledgement" VARCHAR NOT NULL, "dataset_id" CHAR(36) NOT NULL, @@ -200,6 +236,12 @@ CREATE TABLE IF NOT EXISTS "dataset_other" ( CONSTRAINT "dataset_other_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_other: 1 rows +/*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; +INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "managing_organization_ror_id", "size", "standards_followed", "acknowledgement", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; + -- Dumping structure for table public.dataset_readme CREATE TABLE IF NOT EXISTS "dataset_readme" ( "id" CHAR(36) NOT NULL, @@ -209,6 +251,12 @@ CREATE TABLE IF NOT EXISTS "dataset_readme" ( CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table 
public.dataset_readme: 1 rows +/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; + -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( "id" CHAR(36) NOT NULL, @@ -219,6 +267,15 @@ CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( CONSTRAINT "dataset_record_keys_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_record_keys: 4 rows +/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; +INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES + ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), + ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), + ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; + -- Dumping structure for table public.dataset_related_item CREATE TABLE IF NOT EXISTS "dataset_related_item" ( "id" CHAR(36) NOT NULL, @@ -229,6 +286,13 @@ CREATE TABLE IF NOT EXISTS "dataset_related_item" ( CONSTRAINT "dataset_related_item_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_related_item: 2 rows +/*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; +INSERT INTO "dataset_related_item" ("id", "type", "relation_type", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'main', 'main', 
'00000000-0000-0000-0000-000000000002'), + ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; + -- Dumping structure for table public.dataset_related_item_contributor CREATE TABLE IF NOT EXISTS "dataset_related_item_contributor" ( "id" CHAR(36) NOT NULL, @@ -241,6 +305,12 @@ CREATE TABLE IF NOT EXISTS "dataset_related_item_contributor" ( CONSTRAINT "dataset_related_item_contributor_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_related_item_contributor: 1 rows +/*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; +INSERT INTO "dataset_related_item_contributor" ("id", "name", "name_type", "creator", "contributor_type", "dataset_related_item_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'string', 'true', 'owner', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; + -- Dumping structure for table public.dataset_related_item_identifier CREATE TABLE IF NOT EXISTS "dataset_related_item_identifier" ( "id" CHAR(36) NOT NULL, @@ -254,6 +324,10 @@ CREATE TABLE IF NOT EXISTS "dataset_related_item_identifier" ( CONSTRAINT "dataset_related_item_identifier_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_related_item_identifier: 0 rows +/*!40000 ALTER TABLE "dataset_related_item_identifier" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_identifier" ENABLE KEYS */; + -- Dumping structure for table public.dataset_related_item_other CREATE TABLE IF NOT EXISTS "dataset_related_item_other" ( "id" CHAR(36) NOT NULL, @@ -271,6 +345,10 @@ CREATE TABLE IF NOT EXISTS 
"dataset_related_item_other" ( CONSTRAINT "dataset_related_item_other_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_related_item_other: 0 rows +/*!40000 ALTER TABLE "dataset_related_item_other" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_other" ENABLE KEYS */; + -- Dumping structure for table public.dataset_related_item_title CREATE TABLE IF NOT EXISTS "dataset_related_item_title" ( "id" CHAR(36) NOT NULL, @@ -281,6 +359,10 @@ CREATE TABLE IF NOT EXISTS "dataset_related_item_title" ( CONSTRAINT "dataset_related_item_title_dataset_related_item_id_fkey" FOREIGN KEY ("dataset_related_item_id") REFERENCES "dataset_related_item" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_related_item_title: 0 rows +/*!40000 ALTER TABLE "dataset_related_item_title" DISABLE KEYS */; +/*!40000 ALTER TABLE "dataset_related_item_title" ENABLE KEYS */; + -- Dumping structure for table public.dataset_rights CREATE TABLE IF NOT EXISTS "dataset_rights" ( "id" CHAR(36) NOT NULL, @@ -293,6 +375,13 @@ CREATE TABLE IF NOT EXISTS "dataset_rights" ( CONSTRAINT "dataset_rights_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_rights: 2 rows +/*!40000 ALTER TABLE "dataset_rights" DISABLE KEYS */; +INSERT INTO "dataset_rights" ("id", "rights", "uri", "identifier", "identifier_scheme", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'), + ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; + -- Dumping structure for table public.dataset_subject CREATE TABLE IF NOT 
EXISTS "dataset_subject" ( "id" CHAR(36) NOT NULL, @@ -306,6 +395,13 @@ CREATE TABLE IF NOT EXISTS "dataset_subject" ( CONSTRAINT "dataset_subject_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_subject: 2 rows +/*!40000 ALTER TABLE "dataset_subject" DISABLE KEYS */; +INSERT INTO "dataset_subject" ("id", "subject", "scheme", "scheme_uri", "value_uri", "classification_code", "dataset_id") VALUES + ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'), + ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; + -- Dumping structure for table public.dataset_title CREATE TABLE IF NOT EXISTS "dataset_title" ( "id" CHAR(36) NOT NULL, @@ -316,6 +412,29 @@ CREATE TABLE IF NOT EXISTS "dataset_title" ( CONSTRAINT "dataset_title_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.dataset_title: 1 rows +/*!40000 ALTER TABLE "dataset_title" DISABLE KEYS */; +INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES + ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; + +-- Dumping structure for table public.invited_study_contributor +CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( + "email_address" VARCHAR NOT NULL, + "permission" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, + "invited_on" BIGINT NOT NULL, + PRIMARY KEY ("email_address", "study_id"), + CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.invited_study_contributor: 
3 rows +/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; +INSERT INTO "invited_study_contributor" ("email_address", "permission", "study_id", "invited_on") VALUES + ('Aliya_Herman@yahoo.com', 'editor', '00000000-0000-0000-0000-000000000001', 1693805470), + ('Anastacio50@hotmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), + ('Edward0@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470); +/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; -- Dumping structure for table public.participant CREATE TABLE IF NOT EXISTS "participant" ( @@ -324,25 +443,68 @@ CREATE TABLE IF NOT EXISTS "participant" ( "last_name" VARCHAR NOT NULL, "address" VARCHAR NOT NULL, "age" VARCHAR NOT NULL, + "study_id" CHAR(36) NOT NULL, "created_at" BIGINT NOT NULL, "updated_on" BIGINT NOT NULL, - "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "participant_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.participant: 4 rows +/*!40000 ALTER TABLE "participant" DISABLE KEYS */; +INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "study_id", "created_at", "updated_on") VALUES + ('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('c1f24707-e909-45e5-9b44-fd35c0ad62be', 'bhavesh', 'patel', '3904 university ave', '20', '00000000-0000-0000-0000-000000000001', 1694032113, 1694032113); +/*!40000 ALTER TABLE "participant" ENABLE KEYS */; + +-- Dumping structure for table public.study 
+CREATE TABLE IF NOT EXISTS "study" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "image" VARCHAR NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("id") +); + +-- Dumping data for table public.study: 4 rows +/*!40000 ALTER TABLE "study" DISABLE KEYS */; +INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES + ('58b32765-89bd-4815-8611-a465928581cd', 'Study for testing 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1694326095, 1694326095), + ('bd5021d4-0b5a-49ce-af7c-6d687af6c499', 'Study for testing 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1694326100, 1694402433), + ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1693805470, 1694481091), + ('bc66e697-83b6-4750-9c44-4ad6ed3ba0aa', 'Study for testing 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1694482530, 1694482795); +/*!40000 ALTER TABLE "study" ENABLE KEYS */; + -- Dumping structure for table public.study_arm CREATE TABLE IF NOT EXISTS "study_arm" ( "id" CHAR(36) NOT NULL, "label" VARCHAR NOT NULL, "type" VARCHAR NOT NULL, "description" VARCHAR NOT NULL, - "intervention_list" VARCHAR[] NOT NULL, + "intervention_list" UNKNOWN NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NULL DEFAULT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_arm_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_arm: 9 rows +/*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; +INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694481091), + ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 
1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694481091), + ('6132dee8-ef7e-499b-b941-f9d4843e43ea', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694481091), + ('30f71daf-8438-4920-8e8e-6780a773dc79', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694481095), + ('2bbe08d3-ada0-4687-bdce-b08698f3e568', 'arm2', 'Experimental', 'Lorem Ipsum', '{inter1,"intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694481091), + ('26854b50-abd9-4e27-90c6-75bf46d62386', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694481091), + ('036b257e-5036-436e-ab53-24048ce927de', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa', 1694481089), + ('dd222e97-4e1b-4acf-9993-fa6fe10c2cca', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa', 1694482795), + ('ddce222f-40a3-45f0-be49-add3f330e5f0', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa', 1694482795); +/*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; + -- Dumping structure for table public.study_available_ipd CREATE TABLE IF NOT EXISTS "study_available_ipd" ( "id" CHAR(36) NOT NULL, @@ -351,25 +513,65 @@ CREATE TABLE IF NOT EXISTS "study_available_ipd" ( "url" VARCHAR NOT NULL, "comment" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), - CONSTRAINT "study_available_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "study_available_ipd_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table 
public.study_available_ipd: 6 rows +/*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; +INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694326095), + ('82ecf854-bd1a-4cad-8df0-e18d9c231c88', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694326095), + ('8be26f29-9a0a-4b03-9afd-6239aaff1fcd', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694326095), + ('86d604d0-2336-4421-862f-a43e0dfd24a5', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694478302), + ('700d449d-d08d-43a4-820e-63738011ef8b', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694478303), + ('8437d711-dc49-4b9d-8998-b8b238fd712e', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694478340); +/*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; + -- Dumping structure for table public.study_contact CREATE TABLE IF NOT EXISTS "study_contact" ( "id" CHAR(36) NOT NULL, "name" VARCHAR NOT NULL, "affiliation" VARCHAR NOT NULL, - "role" VARCHAR, + "role" VARCHAR NULL DEFAULT NULL, "phone" VARCHAR NOT NULL, "phone_ext" VARCHAR NOT NULL, "email_address" VARCHAR NOT NULL, "central_contact" BOOLEAN NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_contact_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_contact: 3 rows +/*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; +INSERT INTO "study_contact" ("id", "name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id", 
"created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', 'true', '00000000-0000-0000-0000-000000000001', 1694478303), + ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', 'false', '00000000-0000-0000-0000-000000000001', 1694478303), + ('aff6fdc4-cdd0-4555-ad22-0292aeb23e15', 'Dejah', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', 'true', '00000000-0000-0000-0000-000000000001', 1694480988); +/*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; + +-- Dumping structure for table public.study_contributor +CREATE TABLE IF NOT EXISTS "study_contributor" ( + "permission" VARCHAR NOT NULL, + "user_id" CHAR(36) NOT NULL, + "study_id" CHAR(36) NOT NULL, + PRIMARY KEY ("user_id", "study_id"), + CONSTRAINT "study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_contributor_user_id_fkey" FOREIGN KEY ("user_id") REFERENCES "user" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_contributor: 4 rows +/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; +INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES + ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'), + ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), + ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; + -- Dumping structure for table public.study_description CREATE TABLE IF NOT EXISTS "study_description" ( "id" CHAR(36) NOT NULL, @@ -380,52 +582,74 @@ CREATE TABLE IF NOT EXISTS "study_description" ( CONSTRAINT 
"study_description_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_description: 3 rows +/*!40000 ALTER TABLE "study_description" DISABLE KEYS */; +INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'study summary', 'this dataset has been created sintethically, therefore all study metadata files do not operates as expected', '00000000-0000-0000-0000-000000000001'), + ('1e7e65f8-e159-4983-81fa-15543108796f', '', 'in this dataset all metadata connection is set and works well', 'bd5021d4-0b5a-49ce-af7c-6d687af6c499'), + ('0fee172a-d11b-423a-9ecb-1ff43a7a12d2', '', '', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa'); +/*!40000 ALTER TABLE "study_description" ENABLE KEYS */; + -- Dumping structure for table public.study_design CREATE TABLE IF NOT EXISTS "study_design" ( "id" CHAR(36) NOT NULL, - "design_allocation" VARCHAR, + "design_allocation" VARCHAR NULL DEFAULT NULL, "study_type" VARCHAR NOT NULL, - "design_intervention_model" VARCHAR, - "design_intervention_model_description" VARCHAR, - "design_primary_purpose" VARCHAR, - "design_masking" VARCHAR, - "design_masking_description" VARCHAR, - "design_who_masked_list" VARCHAR[], - "phase_list" VARCHAR[], + "design_intervention_model" VARCHAR NULL DEFAULT NULL, + "design_intervention_model_description" VARCHAR NULL DEFAULT NULL, + "design_primary_purpose" VARCHAR NULL DEFAULT NULL, + "design_masking" VARCHAR NULL DEFAULT NULL, + "design_masking_description" VARCHAR NULL DEFAULT NULL, + "design_who_masked_list" UNKNOWN NULL DEFAULT NULL, + "phase_list" UNKNOWN NULL DEFAULT NULL, "enrollment_count" INTEGER NOT NULL, "enrollment_type" VARCHAR NOT NULL, - "number_arms" INTEGER, - "design_observational_model_list" VARCHAR[], - "design_time_perspective_list" VARCHAR[], - "bio_spec_retention" VARCHAR, - "bio_spec_description" VARCHAR, - 
"target_duration" VARCHAR, - "number_groups_cohorts" INTEGER, + "number_arms" INTEGER NULL DEFAULT NULL, + "design_observational_model_list" UNKNOWN NULL DEFAULT NULL, + "design_time_perspective_list" UNKNOWN NULL DEFAULT NULL, + "bio_spec_retention" VARCHAR NULL DEFAULT NULL, + "bio_spec_description" VARCHAR NULL DEFAULT NULL, + "target_duration" VARCHAR NULL DEFAULT NULL, + "number_groups_cohorts" INTEGER NULL DEFAULT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_design_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); --- Dumping structure for table public.study_eligibilitya +-- Dumping data for table public.study_design: 1 rows +/*!40000 ALTER TABLE "study_design" DISABLE KEYS */; +INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_intervention_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES + ('169610d0-7b7a-4bdc-a765-373d53dbcf7d', '', '', '', '', '', '', '', '{}', '{}', 0, '', 0, '{}', '{}', '', '', '', 0, 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa'); +/*!40000 ALTER TABLE "study_design" ENABLE KEYS */; + +-- Dumping structure for table public.study_eligibility CREATE TABLE IF NOT EXISTS "study_eligibility" ( "id" CHAR(36) NOT NULL, "gender" VARCHAR NOT NULL, "gender_based" VARCHAR NOT NULL, "gender_description" VARCHAR NOT NULL, - "healthy_volunteers" VARCHAR NOT NULL, - "inclusion_criteria" VARCHAR[] NOT NULL, - "exclusion_criteria" VARCHAR[] NOT NULL, - "study_population" VARCHAR, - "sampling_method" VARCHAR, - "study_id" CHAR(36) NOT NULL, "minimum_age_value" INTEGER NOT NULL, - "minimum_age_unit" VARCHAR NOT 
NULL, "maximum_age_value" INTEGER NOT NULL, + "minimum_age_unit" VARCHAR NOT NULL, "maximum_age_unit" VARCHAR NOT NULL, + "healthy_volunteers" VARCHAR NULL DEFAULT NULL, + "inclusion_criteria" UNKNOWN NOT NULL, + "exclusion_criteria" UNKNOWN NOT NULL, + "study_population" VARCHAR NULL DEFAULT NULL, + "sampling_method" VARCHAR NULL DEFAULT NULL, + "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_eligibility_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_eligibility: 3 rows +/*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; +INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "minimum_age_value", "maximum_age_value", "minimum_age_unit", "maximum_age_unit", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id") VALUES + ('4515e037-95df-4db1-9f5a-06039f819dc9', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', '58b32765-89bd-4815-8611-a465928581cd'), + ('243aa114-e807-4747-90ab-eca859d9727d', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', 'bd5021d4-0b5a-49ce-af7c-6d687af6c499'), + ('2bf3552f-803e-41fc-8292-6435a1e48a12', '', '', '', 18, 60, '', '', '', '{}', '{}', '', '', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa'); +/*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; + -- Dumping structure for table public.study_identification CREATE TABLE IF NOT EXISTS "study_identification" ( "id" CHAR(36) NOT NULL, @@ -435,29 +659,52 @@ CREATE TABLE IF NOT EXISTS "study_identification" ( "identifier_link" VARCHAR NOT NULL, "secondary" BOOLEAN NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_identification_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_identification: 6 rows +/*!40000 ALTER 
TABLE "study_identification" DISABLE KEYS */; +INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001', 1694478303), + ('e5b12255-7492-4c81-b1b2-a835fead0eae', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001', 1694478303), + ('b8d29648-48cd-471f-aae8-9aa144a928b3', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001', 1694478303), + ('2b9b0157-7451-43b9-9256-ef1aef2211ae', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001', 1694478305), + ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000001', 1694478303), + ('fdd46a5d-a31d-4d94-bb41-0ff705ad5885', 'ADF89ADS', 'NIH Grant Numberu', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000001', 1694478303); +/*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; + -- Dumping structure for table public.study_intervention CREATE TABLE IF NOT EXISTS "study_intervention" ( "id" CHAR(36) NOT NULL, "type" VARCHAR NOT NULL, "name" VARCHAR NOT NULL, "description" VARCHAR NOT NULL, - "arm_group_label_list" VARCHAR[] NOT NULL, - "other_name_list" VARCHAR[] NOT NULL, + "arm_group_label_list" UNKNOWN NOT NULL, + "other_name_list" UNKNOWN NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_intervention_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON 
UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_intervention: 5 rows +/*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; +INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001', 1694478303), + ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001', 1694478303), + ('00cc3bcc-ff98-4ac4-843a-de4c947285ee', 'Biological/Vaccine', 'biomedical', '', '{"labels of arms"}', '{}', 'bd5021d4-0b5a-49ce-af7c-6d687af6c499', 1694478303), + ('9b21ddc7-f670-463e-8a9b-0f2585ce561f', 'Drug', 'Test Name2', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001', 1694478693), + ('c75443cd-d936-4323-9604-3707e3c608e1', 'Drug', 'Test Name2', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001', 1694478695); +/*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; + -- Dumping structure for table public.study_ipdsharing CREATE TABLE IF NOT EXISTS "study_ipdsharing" ( "id" CHAR(36) NOT NULL, "ipd_sharing" VARCHAR NOT NULL, "ipd_sharing_description" VARCHAR NOT NULL, - "ipd_sharing_info_type_list" VARCHAR[] NOT NULL, + "ipd_sharing_info_type_list" UNKNOWN NOT NULL, "ipd_sharing_time_frame" VARCHAR NOT NULL, "ipd_sharing_access_criteria" VARCHAR NOT NULL, "ipd_sharing_url" VARCHAR NOT NULL, @@ -466,16 +713,35 @@ CREATE TABLE IF NOT EXISTS "study_ipdsharing" ( CONSTRAINT "study_ipdsharing_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_ipdsharing: 4 rows +/*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; +INSERT INTO "study_ipdsharing" ("id", "ipd_sharing", 
"ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Yes', 'Lorem Ipsum', '{"Study Protocol"}', 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), + ('27a8dd6c-0733-4480-b17c-1d9145f9fea4', '', '', '{}', '', '', '', '58b32765-89bd-4815-8611-a465928581cd'), + ('cd07c266-017e-4c38-b348-a387e0781770', '', '', '{}', '', '', '', 'bd5021d4-0b5a-49ce-af7c-6d687af6c499'), + ('f1aa4fbf-ef13-4e3b-a8e8-4e89c71a3a98', '', '', '{}', '', '', '', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa'); +/*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; + -- Dumping structure for table public.study_link CREATE TABLE IF NOT EXISTS "study_link" ( "id" CHAR(36) NOT NULL, "url" VARCHAR NOT NULL, "title" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_link_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_link: 4 rows +/*!40000 ALTER TABLE "study_link" DISABLE KEYS */; +INSERT INTO "study_link" ("id", "url", "title", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001', 1694478303), + ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001', 1694478303), + ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001', 1694478304), + ('d499238e-da50-4eb2-945d-06770979a88f', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001', 1694481091); +/*!40000 ALTER TABLE "study_link" ENABLE KEYS */; + -- Dumping structure for table public.study_location CREATE TABLE IF NOT EXISTS "study_location" ( "id" 
CHAR(36) NOT NULL, @@ -486,22 +752,39 @@ CREATE TABLE IF NOT EXISTS "study_location" ( "zip" VARCHAR NOT NULL, "country" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_location_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_location: 4 rows +/*!40000 ALTER TABLE "study_location" DISABLE KEYS */; +INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001', 1694478303), + ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001', 1694478303), + ('a3c03f56-885b-4616-a406-53ec7cafd129', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001', 1694480780), + ('b3071ea9-7208-408f-bbf5-285eeec819fc', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001', 1694480785); +/*!40000 ALTER TABLE "study_location" ENABLE KEYS */; + -- Dumping structure for table public.study_other CREATE TABLE IF NOT EXISTS "study_other" ( "id" CHAR(36) NOT NULL, "oversight_has_dmc" BOOLEAN NOT NULL, - "conditions" VARCHAR[] NOT NULL, - "keywords" VARCHAR[] NOT NULL, + "conditions" UNKNOWN NOT NULL, + "keywords" UNKNOWN NOT NULL, "size" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_other_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_other: 2 rows +/*!40000 ALTER TABLE "study_other" DISABLE KEYS */; +INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES + 
('00000000-0000-0000-0000-000000000001', 'true', '{"condition 1"}', '{"keyword 1"}', '1 GB', '00000000-0000-0000-0000-000000000001'), + ('0601da90-1050-4e93-8d28-a264bae0405c', 'false', '{}', '{}', '', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa'); +/*!40000 ALTER TABLE "study_other" ENABLE KEYS */; + -- Dumping structure for table public.study_overall_official CREATE TABLE IF NOT EXISTS "study_overall_official" ( "id" CHAR(36) NOT NULL, @@ -509,10 +792,18 @@ CREATE TABLE IF NOT EXISTS "study_overall_official" ( "affiliation" VARCHAR NOT NULL, "role" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), - CONSTRAINT "study_overall_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "study_overall_official_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_overall_official: 2 rows +/*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; +INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001', 1693805470), + ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001', 1693805471); +/*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; + -- Dumping structure for table public.study_reference CREATE TABLE IF NOT EXISTS "study_reference" ( "id" CHAR(36) NOT NULL, @@ -520,10 +811,20 @@ CREATE TABLE IF NOT EXISTS "study_reference" ( "type" VARCHAR NOT NULL, "citation" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_reference_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- 
Dumping data for table public.study_reference: 4 rows +/*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; +INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001', 1694478303), + ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 'none', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001', 1694478303), + ('f82bec7f-7258-4294-9e6b-30ec4eec3398', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001', 1694481032), + ('18428285-302f-46e0-aef8-f42b27528591', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001', 1694481055); +/*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; + -- Dumping structure for table public.study_sponsors_collaborators CREATE TABLE IF NOT EXISTS "study_sponsors_collaborators" ( "id" CHAR(36) NOT NULL, @@ -532,41 +833,89 @@ CREATE TABLE IF NOT EXISTS "study_sponsors_collaborators" ( "responsible_party_investigator_title" VARCHAR NOT NULL, "responsible_party_investigator_affiliation" VARCHAR NOT NULL, "lead_sponsor_name" VARCHAR NOT NULL, - "collaborator_name" VARCHAR[] NOT NULL, + "collaborator_name" UNKNOWN NOT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_sponsors_collaborators_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_sponsors_collaborators: 2 rows +/*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; +INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name", "collaborator_name", "study_id") VALUES + ('6aa629ef-937c-4a80-ab27-72815d1e75f6', '', '', '', '', '', '{}', '58b32765-89bd-4815-8611-a465928581cd'), + 
('3f09668c-e774-49b0-a733-c48741540373', '', '', '', '', '', '{}', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa'); +/*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; + -- Dumping structure for table public.study_status CREATE TABLE IF NOT EXISTS "study_status" ( "id" CHAR(36) NOT NULL, "overall_status" VARCHAR NOT NULL, "why_stopped" VARCHAR NOT NULL, - "start_date" TIMESTAMP NOT NULL, + "start_date" TIMESTAMP NULL DEFAULT NULL, "start_date_type" VARCHAR NOT NULL, - "completion_date" TIMESTAMP NOT NULL, + "completion_date" TIMESTAMP NULL DEFAULT NULL, "completion_date_type" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table public.study_status: 3 rows +/*!40000 ALTER TABLE "study_status" DISABLE KEYS */; +INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES + ('00000000-0000-0000-0000-000000000001', 'Recruiting new', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Anticipated', '00000000-0000-0000-0000-000000000001'), + ('22da82ad-bcc8-4a5d-bdf1-ee8d69fff175', '', '', NULL, '', NULL, '', 'bd5021d4-0b5a-49ce-af7c-6d687af6c499'), + ('0a85e95b-b4aa-4e27-9d80-1cecdfb5ae35', '', '', NULL, '', NULL, '', 'bc66e697-83b6-4750-9c44-4ad6ed3ba0aa'); +/*!40000 ALTER TABLE "study_status" ENABLE KEYS */; + +-- Dumping structure for table public.user +CREATE TABLE IF NOT EXISTS "user" ( + "id" CHAR(36) NOT NULL, + "email_address" VARCHAR NOT NULL, + "username" VARCHAR NOT NULL, + "first_name" VARCHAR NOT NULL, + "last_name" VARCHAR NOT NULL, + "orcid" VARCHAR NOT NULL, + "hash" VARCHAR NOT NULL, + "institution" VARCHAR NOT NULL, + "created_at" BIGINT NOT NULL, + PRIMARY KEY ("id") +); + +-- Dumping data for table public.user: 5 rows +/*!40000 ALTER TABLE "user" 
DISABLE KEYS */; +INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "institution", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', 'Schinner, Kuvalis and Beatty', 1693805470), + ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', 'Schmitt Inc', 1693805470), + ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', 'Stracke, Leuschke and Kuvalis', 1693805470), + ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', 'Heidenreich, Wilkinson and Mitchell', 1693805470), + ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', 'Heaney, Russel and Turner', 1693805470); +/*!40000 ALTER TABLE "user" ENABLE KEYS */; + -- Dumping structure for table public.version CREATE TABLE IF NOT EXISTS "version" ( "id" CHAR(36) NOT NULL, "title" VARCHAR NOT NULL, "published" BOOLEAN NOT NULL, "changelog" VARCHAR NOT NULL, - "updated_on" BIGINT NOT NULL, "doi" VARCHAR NOT NULL, - "created_at" BIGINT NOT NULL, - "published_on" BIGINT NOT NULL, + "published_on" TIMESTAMP NOT NULL, "dataset_id" CHAR(36) NOT NULL, + "updated_on" BIGINT NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), - CONSTRAINT "dataset_version_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "version_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); +-- Dumping data for table 
public.version: 4 rows +/*!40000 ALTER TABLE "version" DISABLE KEYS */; +INSERT INTO "version" ("id", "title", "published", "changelog", "doi", "published_on", "dataset_id", "updated_on", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000001', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000002', 1693805470, 1693805470), + ('00000000-0000-0000-0000-000000000004', 'Version 1', 'false', 'lorem ipsum', '2435464e643', '2023-08-13 16:24:00', '00000000-0000-0000-0000-000000000003', 1693805470, 1693805470); +/*!40000 ALTER TABLE "version" ENABLE KEYS */; + -- Dumping structure for table public.version_participants CREATE TABLE IF NOT EXISTS "version_participants" ( "dataset_version_id" CHAR(36) NOT NULL, @@ -576,396 +925,12 @@ CREATE TABLE IF NOT EXISTS "version_participants" ( CONSTRAINT "version_participants_participant_id_fkey" FOREIGN KEY ("participant_id") REFERENCES "participant" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); --- Dumping data for table public.study: -1 rows --- done -/*!40000 ALTER TABLE "study" DISABLE KEYS */; -INSERT INTO "study" ("id", "title", "image", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', 'study 1', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=1', 1693957896, 1693957896), - ('00000000-0000-0000-0000-000000000002', 'study 2', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=2', 1693957896, 1693957896), - ('00000000-0000-0000-0000-000000000003', 'study 3', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=3', 1693957896, 1693957896), - 
('00000000-0000-0000-0000-000000000004', 'study 4', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=4', 1693957896, 1693957896), - ('00000000-0000-0000-0000-000000000005', 'study 5', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=5', 1693957896, 1693957896), - ('00000000-0000-0000-0000-000000000006', 'study 6', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=6', 1693957896, 1693957896), - ('00000000-0000-0000-0000-000000000007', 'study 7', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=7', 1693957896, 1693957896), - ('00000000-0000-0000-0000-000000000008', 'study 8', 'https://api.dicebear.com/7.x/fun-emoji/svg?seed=8', 1693957896, 1693957896); -/*!40000 ALTER TABLE "study" ENABLE KEYS */; - --- Dumping data for table public.user: -1 rows --- done -/*!40000 ALTER TABLE "user" DISABLE KEYS */; -INSERT INTO "user" ("id", "email_address", "username", "first_name", "last_name", "orcid", "hash", "created_at", "institution") VALUES - ('00000000-0000-0000-0000-000000000001', 'Ervin_Lindgren@hotmail.com', 'Ervin79', 'Ervin', 'Lindgren', 'd348206e-b1e2-4f99-9157-44b1321ecb4c', 'hashed', 1693957896, 'Schinner, Kuvalis and Beatty'), - ('00000000-0000-0000-0000-000000000002', 'Camila.Pacocha@hotmail.com', 'Camila_Pacocha', 'Camila', 'Pacocha', '699e9977-5d86-40fc-bf1a-a5083f0cdc95', 'hashed', 1693957896, 'Schmitt Inc'), - ('00000000-0000-0000-0000-000000000003', 'Alaina.Hammes@hotmail.com', 'Alaina_Hammes', 'Alaina', 'Hammes', '0b39872c-a1d6-44c0-88c2-7ea1b3a33dcf', 'hashed', 1693957896, 'Stracke, Leuschke and Kuvalis'), - ('00000000-0000-0000-0000-000000000004', 'Brady_Anderson@gmail.com', 'Brady_Anderson', 'Brady', 'Anderson', '779d42d2-4743-43d3-980b-fcf1a962b485', 'hashed', 1693957896, 'Heidenreich, Wilkinson and Mitchell'), - ('00000000-0000-0000-0000-000000000005', 'Brycen78@hotmail.com', 'Brycen_OReilly64', 'Brycen', 'O''Reilly', '529053dc-a755-4819-bdd2-a593d41e7f73', 'hashed', 1693957896, 'Heaney, Russel and Turner'); -/*!40000 ALTER TABLE "user" ENABLE KEYS */; 
- --- Dumping data for table public.study_contributor: -1 rows --- done -/*!40000 ALTER TABLE "study_contributor" DISABLE KEYS */; -INSERT INTO "study_contributor" ("permission", "user_id", "study_id") VALUES - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), - ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'), - ('editor', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000001'), - ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000001'), - ('owner', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), - ('viewer', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), - ('viewer', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000002'), - ('owner', '00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000003'), - ('viewer', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000003'), - ('editor', '00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000003'), - ('owner', '00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000004'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000005'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007'), - ('owner', '00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000008'); -/*!40000 ALTER TABLE "study_contributor" ENABLE KEYS */; - --- Dumping data for table public.invited_study_contributor: -1 rows --- done -/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; -INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES - ('Aliya_Herman@yahoo.com', 'editor', 1693957896, 
'00000000-0000-0000-0000-000000000001'), - ('Anastacio50@hotmail.com', 'viewer', 1693957896, '00000000-0000-0000-0000-000000000001'), - ('Edward0@gmail.com', 'viewer', 1693957896, '00000000-0000-0000-0000-000000000001'), - ('Jailyn17@gmail.com', 'viewer', 1693957896, '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; - --- Dumping data for table public.dataset: -1 rows --- done -/*!40000 ALTER TABLE "dataset" DISABLE KEYS */; -INSERT INTO "dataset" ("id", "updated_on", "created_at", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000005', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000006', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000003'); -/*!40000 ALTER TABLE "dataset" ENABLE KEYS */; - --- Dumping data for table public.dataset_access: -1 rows -/*!40000 ALTER TABLE "dataset_access" DISABLE KEYS */; -INSERT INTO "dataset_access" ("id", "type", "description", "url", "url_last_checked", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'PublicOnScreenAccess', '', '', '', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "dataset_access" ENABLE KEYS */; - --- Dumping data for table public.dataset_alternate_identifier: 3 rows -/*!40000 ALTER TABLE "dataset_alternate_identifier" DISABLE KEYS */; -INSERT INTO "dataset_alternate_identifier" ("id", "identifier", 
"identifier_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', '126543GF3', 'GRID', '00000000-0000-0000-0000-000000000001'), - ('77df307c-eb87-450b-b5d5-7d75bfb88cf7', 'N/A', 'N/A', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_alternate_identifier" ENABLE KEYS */; - --- Dumping data for table public.dataset_consent: -1 rows -/*!40000 ALTER TABLE "dataset_consent" DISABLE KEYS */; -INSERT INTO "dataset_consent" ("id", "type", "noncommercial", "geog_restrict", "research_type", "genetic_only", "no_methods", "details", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'), - ('f38a6bae-8724-411d-999a-f587cfdd32bf', 'none', 'true', 'true', 'true', 'false', 'false', 'na', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_consent" ENABLE KEYS */; - --- Dumping data for table public.dataset_contributor: -1 rows -/*!40000 ALTER TABLE "dataset_contributor" DISABLE KEYS */; -INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", "name_identifier", "name_identifier_scheme", "name_identifier_scheme_uri", "creator", "contributor_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_contributor" ENABLE KEYS */; - --- Dumping data for table public.dataset_contributor_affiliation: -1 rows -/*!40000 ALTER TABLE "dataset_contributor_affiliation" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_contributor_affiliation" ENABLE KEYS */; - --- Dumping data for table public.dataset_date: -1 rows -/*!40000 ALTER TABLE "dataset_date" DISABLE KEYS */; -INSERT INTO "dataset_date" ("id", "date", "date_type", "data_information", 
"dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', '2023', 'day', 'none', '00000000-0000-0000-0000-000000000005'), - ('0b1775e5-d110-482f-a1c4-2aa3947b8db8', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'), - ('dc090dbd-6fa3-4b61-829e-2f139bdbd116', '', 'na', 'none', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_date" ENABLE KEYS */; - --- Dumping data for table public.dataset_description: -1 rows -/*!40000 ALTER TABLE "dataset_description" DISABLE KEYS */; -INSERT INTO "dataset_description" ("id", "description", "description_type", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'AI-READI is one of the data generation projects of the National Institutes of Health (NIH) funded Bridge2AI Program.', 'object', '00000000-0000-0000-0000-000000000004'), - ('78f2b774-2f5a-4096-b82e-9923ca04395b', '', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', '', '', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_description" ENABLE KEYS */; - --- Dumping data for table public.dataset_de_ident_level: -1 rows -/*!40000 ALTER TABLE "dataset_de_ident_level" DISABLE KEYS */; -INSERT INTO "dataset_de_ident_level" ("id", 
"type", "direct", "hipaa", "dates", "nonarr", "k_anon", "details", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', '', 'false', 'true', 'false', 'true', 'false', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'NA', 'false', 'true', 'false', 'true', 'false', 'none', '00000000-0000-0000-0000-000000000002'), - ('1bc4eeb0-dcdf-41af-b8e9-d05923ba45fa', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'), - ('a3f40ca7-4f34-43b5-9e44-fc20e8f50eef', '', 'true', 'true', 'true', 'true', 'true', '', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_de_ident_level" ENABLE KEYS */; - --- Dumping data for table public.dataset_funder: -1 rows -/*!40000 ALTER TABLE "dataset_funder" DISABLE KEYS */; -INSERT INTO "dataset_funder" ("id", "name", "identifier", "identifier_type", "identifier_scheme_uri", "award_number", "award_uri", "award_title", "dataset_id") VALUES - ('8ef6d41f-2f59-492c-9f28-8c1c10bcc4e8', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', 'N/A', '', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_funder" ENABLE KEYS */; - --- Dumping data for table public.dataset_managing_organization: -1 rows -/*!40000 ALTER TABLE "dataset_managing_organization" DISABLE KEYS */; -INSERT INTO "dataset_managing_organization" ("id", "name", "ror_id", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'UCSD', '354grhji5', '00000000-0000-0000-0000-000000000001'), - ('c5d5a32a-c072-4594-989a-4b55acc5d11b', 'UCD', '354grhji5', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_managing_organization" ENABLE KEYS */; - --- Dumping data for table public.dataset_other: -1 rows -/*!40000 ALTER TABLE "dataset_other" DISABLE KEYS */; -INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "managing_organization_ror_id", "size", "standards_followed", "acknowledgement", "dataset_id") VALUES - 
('00000000-0000-0000-0000-000000000002', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'none', 'NA', '00000000-0000-0000-0000-000000000002'), - ('2fca4640-6f0e-406c-8c7a-e93a0740b9c6', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org', 'NA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; - --- Dumping data for table public.dataset_readme: -1 rows -/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; -INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES - ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; - --- Dumping data for table public.dataset_record_keys: -1 rows -/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; -INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES - ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), - ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; - --- Dumping data for table public.dataset_related_item: -1 rows -/*!40000 ALTER TABLE "dataset_related_item" DISABLE KEYS */; -INSERT INTO "dataset_related_item" ("id", "type", "relation_type", "dataset_id") VALUES - 
('00000000-0000-0000-0000-000000000001', 'main', 'main', '00000000-0000-0000-0000-000000000002'), - ('f55af3f0-16f9-4049-8beb-f6673d32bef0', '', '', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_related_item" ENABLE KEYS */; - --- Dumping data for table public.dataset_related_item_contributor: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_contributor" DISABLE KEYS */; -INSERT INTO "dataset_related_item_contributor" ("id", "name", "name_type", "creator", "contributor_type", "dataset_related_item_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'AIREADI', 'string', 'true', 'owner', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_related_item_contributor" ENABLE KEYS */; - --- Dumping data for table public.dataset_related_item_identifier: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_identifier" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_related_item_identifier" ENABLE KEYS */; - --- Dumping data for table public.dataset_related_item_other: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_other" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_related_item_other" ENABLE KEYS */; - --- Dumping data for table public.dataset_related_item_title: -1 rows -/*!40000 ALTER TABLE "dataset_related_item_title" DISABLE KEYS */; -/*!40000 ALTER TABLE "dataset_related_item_title" ENABLE KEYS */; - --- Dumping data for table public.dataset_rights: -1 rows -/*!40000 ALTER TABLE "dataset_rights" DISABLE KEYS */; -INSERT INTO "dataset_rights" ("id", "rights", "uri", "identifier", "identifier_scheme", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'), - ('e9fd3c26-843b-465a-b950-8d23005df384', 'NA', 'https://orcid.org', 'none', 'ORCID', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_rights" ENABLE KEYS */; - --- Dumping data for table public.dataset_subject: -1 rows -/*!40000 ALTER TABLE 
"dataset_subject" DISABLE KEYS */; -INSERT INTO "dataset_subject" ("id", "subject", "scheme", "scheme_uri", "value_uri", "classification_code", "dataset_id") VALUES - ('00000000-0000-0000-0000-000000000001', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'), - ('5ce2ba12-e536-4858-8913-7de2225cecc3', '', '', '', '', 'NLM''s Medical Subject', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_subject" ENABLE KEYS */; - --- Dumping data for table public.dataset_title: -1 rows -/*!40000 ALTER TABLE "dataset_title" DISABLE KEYS */; -INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES - ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; - --- Dumping data for table public.participant: -1 rows -/*!40000 ALTER TABLE "participant" DISABLE KEYS */; -INSERT INTO "participant" ("id", "first_name", "last_name", "address", "age", "created_at", "updated_on", "study_id") VALUES - ('00000000-0000-0000-0000-000000000002', 'bhavesh', 'patel', '3904 university ave', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'sanjay', 'soundarajan', '123 gold coast', '27', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000003'), - ('00000000-0000-0000-0000-000000000004', 'billy', 'sanders', '123 gold coast', '32', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000004'), - ('921ba857-dd08-4149-8f5c-245c6c93ef84', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('458d2c15-6ed8-4f70-a47d-70b42f2f1b86', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('35750167-40c5-4f4a-9d8e-ebe89c2efcfc', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - 
('43c54d45-2f63-41da-8d18-6d3ef06ba476', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('b444520d-0eac-4065-a86d-004481f68d8a', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('88c7592a-4382-4d6b-a197-e880e49db3c0', 'aydan1', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('ba73ed99-6ec2-46e0-acdb-4a00c31dd572', 'aydan', 'gasimova', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000001', 'aydan', 'gasimova1', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('006306a7-0ddb-4163-952d-2939712e190d', 'aydan', 'gasimova1', '1221d kibler drive', '20', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "participant" ENABLE KEYS */; - --- Dumping data for table public.study_arm: -1 rows --- done -/*!40000 ALTER TABLE "study_arm" DISABLE KEYS */; -INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'arm1', 'Experimental', 'Lorem Ipsum', ARRAY ['intervention 1', 'intervention 2'], '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; - --- Dumping data for table public.study_available_ipd: -1 rows --- done -/*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; -INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") 
VALUES - ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'AS2655AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'AS625AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; - --- Dumping data for table public.study_contact: -1 rows --- done -/*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; -INSERT INTO "study_contact" ("id", "name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Dejah Johnston', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Reanna Rolfson', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Verner Nolan', 'Monahan and Sons', NULL, '501-039-841', '', 'Verner19@yahoo.com', TRUE, '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'Lela Cormier', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', TRUE, '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; - --- Dumping data for table public.study_description: -1 rows --- done -/*!40000 ALTER TABLE "study_description" DISABLE KEYS */; -INSERT INTO "study_description" ("id", "brief_summary", "detailed_description", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'study summary', 'big description', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'study summary', 'big description', '00000000-0000-0000-0000-000000000002'), - 
('00000000-0000-0000-0000-000000000003', 'study summary', 'big description', '00000000-0000-0000-0000-000000000003'); -/*!40000 ALTER TABLE "study_description" ENABLE KEYS */; - --- Dumping data for table public.study_design: -1 rows --- done -/*!40000 ALTER TABLE "study_design" DISABLE KEYS */; -INSERT INTO "study_design" ("id", "design_allocation", "study_type", "design_intervention_model", "design_intervention_model_description", "design_primary_purpose", "design_masking", "design_masking_description", "design_who_masked_list", "phase_list", "enrollment_count", "enrollment_type", "number_arms", "design_observational_model_list", "design_time_perspective_list", "bio_spec_retention", "bio_spec_description", "target_duration", "number_groups_cohorts", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Randomized', 'Interventional', 'Treatment', 'description', 'Single Group Assignment', 'Single', 'description', ARRAY ['Participant'], ARRAY ['Phase 1'], 20, 'Actual', 30, NULL, NULL, NULL, NULL, NULL, NULL, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', NULL, 'Observational', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 20, 'Actual', NULL, ARRAY ['Cohort'], ARRAY ['Retrospective'], 'None Retained', 'description', '5 Days', 30, '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_design" ENABLE KEYS */; - --- Dumping data for table public.study_eligibility: 6 rows --- done -/*!40000 ALTER TABLE "study_eligibility" DISABLE KEYS */; -INSERT INTO "study_eligibility" ("id", "gender", "gender_based", "gender_description", "healthy_volunteers", "inclusion_criteria", "exclusion_criteria", "study_population", "sampling_method", "study_id", "minimum_age_value", "minimum_age_unit", "maximum_age_value", "maximum_age_unit") VALUES - ('00000000-0000-0000-0000-000000000001', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], NULL, NULL, '00000000-0000-0000-0000-000000000001', 24, 'Years', 
34, 'Years'), - ('00000000-0000-0000-0000-000000000002', 'All', 'Yes', 'Description', 'Yes', ARRAY ['inclusion 1'], ARRAY ['exclusion 1'], 'Description', 'Probability Sample', '00000000-0000-0000-0000-000000000002', 24, 'Years', 34, 'Years'); -/*!40000 ALTER TABLE "study_eligibility" ENABLE KEYS */; - --- Dumping data for table public.study_identification: -1 rows --- done -/*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; -INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', FALSE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', TRUE, '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; - --- Dumping data for table public.study_intervention: -1 rows --- done -/*!40000 ALTER TABLE "study_intervention" DISABLE KEYS */; -INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', ARRAY ['name 1'], ARRAY ['name 1'], '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "study_intervention" ENABLE KEYS 
*/; - --- Dumping data for table public.study_ipdsharing: -1 rows --- done -/*!40000 ALTER TABLE "study_ipdsharing" DISABLE KEYS */; -INSERT INTO "study_ipdsharing" ("id", "ipd_sharing", "ipd_sharing_description", "ipd_sharing_info_type_list", "ipd_sharing_time_frame", "ipd_sharing_access_criteria", "ipd_sharing_url", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Yes', 'Lorem Ipsum', ARRAY ['Study Protocol'], 'January 2025', 'No criteria', 'https://orcid.org/', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_ipdsharing" ENABLE KEYS */; - --- Dumping data for table public.study_link: -1 rows --- done -/*!40000 ALTER TABLE "study_link" DISABLE KEYS */; -INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_link" ENABLE KEYS */; - --- Dumping data for table public.study_location: -1 rows --- done -/*!40000 ALTER TABLE "study_location" DISABLE KEYS */; -INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', 
'00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_location" ENABLE KEYS */; - --- Dumping data for table public.study_other: -1 rows --- done -/*!40000 ALTER TABLE "study_other" DISABLE KEYS */; -INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', TRUE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '1 GB', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', FALSE, ARRAY ['condition 1'], ARRAY ['keyword 1'], '3 GB', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_other" ENABLE KEYS */; - --- Dumping data for table public.study_overall_official: -1 rows --- done -/*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; -INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Zoey Bashirian', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Ashlynn Grady', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Maiya Bartoletti', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; - --- Dumping data for table public.study_reference: 6 rows --- done -/*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; -INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 'Yes', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), - 
('00000000-0000-0000-0000-000000000003', 'PMID1A2234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; - --- Dumping data for table public.study_sponsors_collaborators: -1 rows -/*!40000 ALTER TABLE "study_sponsors_collaborators" DISABLE KEYS */; --- done -INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "responsible_party_investigator_name", "responsible_party_investigator_title", "responsible_party_investigator_affiliation", "lead_sponsor_name","collaborator_name", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Principal Investigator', 'Sean West', 'Title 1', 'Wyman Inc', 'Kurtis Daniel', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Principal Investigator', 'Sean East', 'Title 1', 'Medhurst Inc', 'Maiya Bartoletti', ARRAY ['Person 1', 'Person 2'], '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_sponsors_collaborators" ENABLE KEYS */; - --- Dumping data for table public.study_status: -1 rows --- done -/*!40000 ALTER TABLE "study_status" DISABLE KEYS */; -INSERT INTO "study_status" ("id", "overall_status", "why_stopped", "start_date", "start_date_type", "completion_date", "completion_date_type", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Recruiting', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Anticipated', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Suspended', 'Lorem Ipsum', '2021-08-21 12:57:34', 'Actual', '2022-08-21 12:57:44', 'Actual', '00000000-0000-0000-0000-000000000002'); -/*!40000 ALTER TABLE "study_status" ENABLE KEYS */; - --- Dumping data for table public.version: -1 rows --- done -/*!40000 ALTER TABLE "version" DISABLE KEYS */; -INSERT INTO "version" ("id", "title", "published", "changelog", "updated_on", "doi", "created_at", "published_on", "dataset_id") 
VALUES - ('00000000-0000-0000-0000-000000000001', 'Version 1', 'true', 'lorem ipsum', 1693957896, '2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Version 2', 'false', 'lorem ipsum', 1693957896, '2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Version 1', 'false', 'lorem ipsum', 1693957896, '2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000004', 'Version 1', 'false', 'lorem ipsum', 1693957896, '2435464e643', 1693957896, 1693957896, '00000000-0000-0000-0000-000000000003'); -/*!40000 ALTER TABLE "version" ENABLE KEYS */; - --- Dumping data for table public.version_participants: -1 rows +-- Dumping data for table public.version_participants: 1 rows /*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES - ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; -COMMIT; - /*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index bb5a7f0d..4a093ff7 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -563,19 +563,19 @@ CREATE TABLE IF NOT EXISTS "study_arm" ( -- Dumping data for table public.study_arm: -1 rows /*!40000 ALTER TABLE "study_arm" DISABLE 
KEYS */; -INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), - ('3fa464ca-6701-4a75-ab84-c26f3d3f49be', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), - ('527b87cc-55e5-4e39-ada6-1ed738cdde47', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), - ('3d2189e8-e95b-4d1b-ac1e-b0716bbe9eb4', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), - ('47c1c51b-f145-4b7a-af99-f05eb0feb133', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), - ('50278410-a4ca-4e0b-bff0-632f9a1c447a', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), - ('cb555a08-5387-4d34-b397-1ddd10fec0b9', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002'), - ('038bb56d-2b8b-483a-a974-3612fc52b2a3', 'arm2', 'Experimental', 'Lorem Ipsum', '{inter1,"intervention 2"}', '00000000-0000-0000-0000-000000000001'), - ('173c6350-ba74-47fd-ae34-f39e2c4901ab', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001'), - ('91dca128-d30d-41e3-8115-2a548b029e04', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 
2"}', '00000000-0000-0000-0000-000000000001'); +INSERT INTO "study_arm" ("id", "label", "type", "description", "intervention_list", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000003', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002', 1694326095), + ('3fa464ca-6701-4a75-ab84-c26f3d3f49be', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694326095), + ('527b87cc-55e5-4e39-ada6-1ed738cdde47', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002', 1694326095), + ('3d2189e8-e95b-4d1b-ac1e-b0716bbe9eb4', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002', 1694326095), + ('47c1c51b-f145-4b7a-af99-f05eb0feb133', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002', 1694326095), + ('50278410-a4ca-4e0b-bff0-632f9a1c447a', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002', 1694326095), + ('cb555a08-5387-4d34-b397-1ddd10fec0b9', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000002', 1694326095), + ('038bb56d-2b8b-483a-a974-3612fc52b2a3', 'arm2', 'Experimental', 'Lorem Ipsum', '{inter1,"intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694326095), + ('173c6350-ba74-47fd-ae34-f39e2c4901ab', 'arm1', 'Experimental', 'Lorem Ipsum', '{"intervention 
1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694326095), + ('91dca128-d30d-41e3-8115-2a548b029e04', 'arm2', 'Experimental', 'Lorem Ipsum', '{"intervention 1","intervention 2"}', '00000000-0000-0000-0000-000000000001', 1694326095); /*!40000 ALTER TABLE "study_arm" ENABLE KEYS */; -- Dumping structure for table public.study_available_ipd @@ -586,16 +586,17 @@ CREATE TABLE IF NOT EXISTS "study_available_ipd" ( "url" VARCHAR NOT NULL, "comment" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_available_ipd_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_available_ipd: -1 rows /*!40000 ALTER TABLE "study_available_ipd" DISABLE KEYS */; -INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'AS2655AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'AS625AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_available_ipd" ("id", "identifier", "type", "url", "comment", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'AS25AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'AS2655AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000003', 'AS625AF', 'Study Protocol', 'https://someurl.io', '', '00000000-0000-0000-0000-000000000002', 1694326095); /*!40000 ALTER TABLE "study_available_ipd" ENABLE KEYS */; -- Dumping structure for 
table public.study_contact @@ -609,17 +610,19 @@ CREATE TABLE IF NOT EXISTS "study_contact" ( "email_address" VARCHAR NOT NULL, "central_contact" BOOLEAN NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_contact_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_contact: -1 rows /*!40000 ALTER TABLE "study_contact" DISABLE KEYS */; -INSERT INTO "study_contact" ("id", "name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', 'true', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'Lela', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000003', 'Verner', 'Monahan and Sons', NULL, '501-039-841', '', 'Verner19@yahoo.com', 'false', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', 'false', '00000000-0000-0000-0000-000000000001'); +INSERT INTO "study_contact" ("id", "name", "affiliation", "role", "phone", "phone_ext", "email_address", "central_contact", "study_id", "created_at" +) VALUES + ('00000000-0000-0000-0000-000000000001', 'Dejah', 'Erdman Inc', NULL, '501-039-841', '', 'Dejah83@hotmail.com', 'true', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000004', 'Lela', 'Metz LLC', NULL, '501-039-841', '', 'Lela84@hotmail.com', 'true', '00000000-0000-0000-0000-000000000002', 1694326095), + ('00000000-0000-0000-0000-000000000003', 'Verner', 'Monahan and Sons', NULL, '501-039-841', '', 'Verner19@yahoo.com', 'false', '00000000-0000-0000-0000-000000000002', 
1694326095), + ('00000000-0000-0000-0000-000000000002', 'Reanna', 'Schowalter, Ullrich and Reichert', NULL, '501-039-841', '', 'Reanna79@hotmail.com', 'false', '00000000-0000-0000-0000-000000000001', 1694326095); /*!40000 ALTER TABLE "study_contact" ENABLE KEYS */; -- Dumping structure for table public.study_contributor @@ -766,19 +769,21 @@ CREATE TABLE IF NOT EXISTS "study_identification" ( "identifier_link" VARCHAR NOT NULL, "secondary" BOOLEAN NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_identification_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_identification: -1 rows /*!40000 ALTER TABLE "study_identification" DISABLE KEYS */; -INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'), - ('00000000-0000-0000-0000-000000000005', 'ADF897ADS', 'NIH Grant Number', 'domain', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000002'), - ('d70c6003-1a9d-4ee2-adca-3250dd1ae50a', 'ADF897ADS', 'NIH Grant Number', '', 
'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_identification" ("id", "identifier", "identifier_type", "identifier_domain", "identifier_link", "secondary", "study_id", "created_at" +) VALUES + ('00000000-0000-0000-0000-000000000001', 'ADF89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'ADF8934ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000003', 'AD6F89ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000004', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002', 1694326095), + ('00000000-0000-0000-0000-000000000005', 'ADF897ADS', 'NIH Grant Number', 'domain', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'false', '00000000-0000-0000-0000-000000000002', 1694326095), + ('d70c6003-1a9d-4ee2-adca-3250dd1ae50a', 'ADF897ADS', 'NIH Grant Number', '', 'https://reporter.nih.gov/quickSearch/K01HL147713', 'true', '00000000-0000-0000-0000-000000000002', 1694326095); /*!40000 ALTER TABLE "study_identification" ENABLE KEYS */; -- Dumping structure for table public.study_intervention @@ -790,15 +795,16 @@ CREATE TABLE IF NOT EXISTS "study_intervention" ( "arm_group_label_list" VARCHAR[] NOT NULL, "other_name_list" VARCHAR[] NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_intervention_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_intervention: -1 rows /*!40000 ALTER 
TABLE "study_intervention" DISABLE KEYS */; -INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001'); +INSERT INTO "study_intervention" ("id", "type", "name", "description", "arm_group_label_list", "other_name_list", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'Drug', 'Test Name1', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'Drug', 'Test Name2', 'Lorem Ipsum', '{"name 1"}', '{"name 1"}', '00000000-0000-0000-0000-000000000001', 1694326095); /*!40000 ALTER TABLE "study_intervention" ENABLE KEYS */; -- Dumping structure for table public.study_ipdsharing @@ -837,17 +843,18 @@ CREATE TABLE IF NOT EXISTS "study_link" ( "url" VARCHAR NOT NULL, "title" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_link_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_link: -1 rows /*!40000 ALTER TABLE "study_link" DISABLE KEYS */; -INSERT INTO "study_link" ("id", "url", "title", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000004', 
'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_link" ("id", "url", "title", "study_id", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'https://schema.aireadi.org/', 'schema2', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000003', 'https://schema.aireadi.org/', 'schema3', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000004', 'https://schema.aireadi.org/', 'schema1', '00000000-0000-0000-0000-000000000002', 1694326095); /*!40000 ALTER TABLE "study_link" ENABLE KEYS */; -- Dumping structure for table public.study_location @@ -860,16 +867,18 @@ CREATE TABLE IF NOT EXISTS "study_location" ( "zip" VARCHAR NOT NULL, "country" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_location_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_location: -1 rows /*!40000 ALTER TABLE "study_location" DISABLE KEYS */; -INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_location" ("id", "facility", "status", "city", "state", "zip", "country", "study_id", , "created_at" +) VALUES + ('00000000-0000-0000-0000-000000000001', 
'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'facility2', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000003', 'facility1', 'Recruting', 'San Diego', 'CA', '92121', 'USA', '00000000-0000-0000-0000-000000000002', 1694326095); /*!40000 ALTER TABLE "study_location" ENABLE KEYS */; -- Dumping structure for table public.study_other @@ -908,18 +917,20 @@ CREATE TABLE IF NOT EXISTS "study_overall_official" ( "affiliation" VARCHAR NOT NULL, "role" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_overall_official_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_overall_official: -1 rows /*!40000 ALTER TABLE "study_overall_official" DISABLE KEYS */; -INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Lowe, Kshlerin and Ward', 'Study Director', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'), - ('b1683ba3-26ca-42c5-a257-1974dbbf4f8b', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'), - ('319c21f2-9441-48ec-a64c-ab839a1da2a3', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "study_id", "created_at" +) VALUES + ('00000000-0000-0000-0000-000000000001', 'Zoey', 'Lowe, Kshlerin and Ward', 'Study Director', 
'00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000003', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002', 1694326095), + ('b1683ba3-26ca-42c5-a257-1974dbbf4f8b', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002', 1694326095), + ('319c21f2-9441-48ec-a64c-ab839a1da2a3', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002', 1694326095); /*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; -- Dumping structure for table public.study_reference @@ -929,16 +940,18 @@ CREATE TABLE IF NOT EXISTS "study_reference" ( "type" VARCHAR NOT NULL, "citation" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_reference_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_reference: -1 rows /*!40000 ALTER TABLE "study_reference" DISABLE KEYS */; -INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 'Yes', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001'), - ('00000000-0000-0000-0000-000000000003', 'PMID1A2234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000002'); +INSERT INTO "study_reference" ("id", "identifier", "type", "citation", "study_id", "created_at" +) VALUES + ('00000000-0000-0000-0000-000000000001', 'PMID1234 ', 'Yes', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001', 1694326095), + ('00000000-0000-0000-0000-000000000002', 'PMID12234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000001', 
1694326095), + ('00000000-0000-0000-0000-000000000003', 'PMID1A2234 ', 'No', 'Lorem Ipsum', '00000000-0000-0000-0000-000000000002',1694326095); /*!40000 ALTER TABLE "study_reference" ENABLE KEYS */; -- Dumping structure for table public.study_sponsors_collaborators From 87e76924a1c161c6d9f53af59bfd3678c1f540e8 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 12 Sep 2023 20:29:02 +0000 Subject: [PATCH 134/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/app.py b/app.py index fa9fef91..c2a0eaf1 100644 --- a/app.py +++ b/app.py @@ -63,6 +63,7 @@ def destroy_schema(): with engine.begin() as conn: """Create the database schema.""" model.db.drop_all() + with app.app_context(): engine = model.db.session.get_bind() metadata = MetaData() @@ -77,10 +78,12 @@ def destroy_schema(): if __name__ == "__main__": - from argparse import ArgumentParser + parser = ArgumentParser() - parser.add_argument("-p", "--port", default=5000, type=int, help="port to listen on") + parser.add_argument( + "-p", "--port", default=5000, type=int, help="port to listen on" + ) args = parser.parse_args() port = args.port From 64507e830c94e3656e75138841ee3533e8963514 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 12 Sep 2023 18:19:16 -0700 Subject: [PATCH 135/505] fix: update database elements for study metadata --- apis/study_metadata/study_design.py | 7 ------- model/study_metadata/study_arm.py | 2 +- model/study_metadata/study_other.py | 4 ++-- model/study_metadata/study_status.py | 20 ++++++++++---------- sql/init_timezones.sql | 21 ++++++++++----------- 5 files changed, 23 insertions(+), 31 deletions(-) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 23a2fdd2..29faedeb 100644 --- a/apis/study_metadata/study_design.py +++ 
b/apis/study_metadata/study_design.py @@ -48,10 +48,3 @@ def put(self, study_id: int): db.session.commit() return study_.study_design.to_dict() - # def post(self, study_id: int): - # data = request.json - # study_design_ = Study.query.get(study_id) - # study_design_ = StudyDesign.from_data(study_design_, data) - # db.session.add(study_design_) - # db.session.commit() - # return study_design_.to_dict() diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 49a91a24..d6188ff1 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -19,7 +19,7 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) label = db.Column(db.String, nullable=False) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) description = db.Column(db.String, nullable=False) intervention_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index ace65790..7b948442 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -14,7 +14,7 @@ def __init__(self, study): self.oversight_has_dmc = False self.conditions = [] self.keywords = [] - self.size = "" + self.size = 0 __tablename__ = "study_other" @@ -22,7 +22,7 @@ def __init__(self, study): oversight_has_dmc = db.Column(db.BOOLEAN, nullable=False) conditions = db.Column(ARRAY(String), nullable=False) keywords = db.Column(ARRAY(String), nullable=False) - size = db.Column(db.String, nullable=False) + size = db.Column(db.BigInteger, nullable=False) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_other") diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 4e37ca0c..e08734d0 100644 --- a/model/study_metadata/study_status.py +++ 
b/model/study_metadata/study_status.py @@ -9,22 +9,22 @@ class StudyStatus(db.Model): def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study - self.overall_status = "" + self.overall_status = None self.why_stopped = "" self.start_date = None - self.start_date_type = "" + self.start_date_type = None self.completion_date = None - self.completion_date_type = "" + self.completion_date_type = None __tablename__ = "study_status" id = db.Column(db.CHAR(36), primary_key=True) - overall_status = db.Column(db.String, nullable=False) + overall_status = db.Column(db.String, nullable=True) why_stopped = db.Column(db.String, nullable=False) - start_date = db.Column(db.DateTime, nullable=True) - start_date_type = db.Column(db.String, nullable=False) - completion_date = db.Column(db.DateTime, nullable=True) - completion_date_type = db.Column(db.String, nullable=False) + start_date = db.Column(db.String, nullable=True) + start_date_type = db.Column(db.String, nullable=True) + completion_date = db.Column(db.String, nullable=True) + completion_date_type = db.Column(db.String, nullable=True) study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) study = db.relationship("Study", back_populates="study_status") @@ -35,9 +35,9 @@ def to_dict(self): "id": self.id, "overall_status": self.overall_status, "why_stopped": self.why_stopped, - "start_date": str(self.start_date), + "start_date": self.start_date, "start_date_type": self.start_date_type, - "completion_date": str(self.completion_date), + "completion_date": self.completion_date, "completion_date_type": self.completion_date_type, } diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 4a093ff7..9edd316b 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -887,7 +887,7 @@ CREATE TABLE IF NOT EXISTS "study_other" ( "oversight_has_dmc" BOOLEAN NOT NULL, "conditions" VARCHAR[] NOT NULL, "keywords" VARCHAR[] NOT NULL, - "size" VARCHAR NOT NULL, + "size" BIGINT NOT NULL, "study_id" 
CHAR(36) NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_other_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION @@ -896,8 +896,8 @@ CREATE TABLE IF NOT EXISTS "study_other" ( -- Dumping data for table public.study_other: -1 rows /*!40000 ALTER TABLE "study_other" DISABLE KEYS */; INSERT INTO "study_other" ("id", "oversight_has_dmc", "conditions", "keywords", "size", "study_id") VALUES - ('00000000-0000-0000-0000-000000000001', 'true', '{"condition 1"}', '{"keyword 1"}', '1 GB', '00000000-0000-0000-0000-000000000001'), - ('7a1217d6-6e58-432d-b747-36e1dec81499', 'true', '{conditionupdate}', '{"keyword 1"}', '3 GB', '00000000-0000-0000-0000-000000000002'), + ('00000000-0000-0000-0000-000000000001', 'true', '{"condition 1"}', '{"keyword 1"}', 32, '00000000-0000-0000-0000-000000000001'), + ('7a1217d6-6e58-432d-b747-36e1dec81499', 'true', '{conditionupdate}', '{"keyword 1"}', 32, '00000000-0000-0000-0000-000000000002'), ('837dce97-4073-4c4a-8d65-d1b7c87f92c6', 'false', '{}', '{}', '', 'e5a2a1d2-850f-465a-8fc1-6a1aec6d9e5a'), ('a651f9b1-3db4-4dae-a486-e9f7f7b5a5cb', 'false', '{}', '{}', '', 'ec0064ca-4f34-48a8-9dcc-1377c7ca0a59'), ('ccd3e31d-9e45-4329-9c89-2e7c7fa0d53b', 'false', '{}', '{}', '', '995d703e-a6d0-4dc2-95e7-3ce868eb9fb7'), @@ -985,16 +985,15 @@ INSERT INTO "study_sponsors_collaborators" ("id", "responsible_party_type", "res -- Dumping structure for table public.study_status CREATE TABLE IF NOT EXISTS "study_status" ( "id" CHAR(36) NOT NULL, - "overall_status" VARCHAR NOT NULL, + "overall_status" VARCHAR NULL DEFAULT NULL, "why_stopped" VARCHAR NOT NULL, - "start_date" TIMESTAMP NULL DEFAULT NULL, - "start_date_type" VARCHAR NOT NULL, - "completion_date" TIMESTAMP NULL DEFAULT NULL, - "completion_date_type" VARCHAR NOT NULL, - "study_id" CHAR(36) NOT NULL, + "start_date" VARCHAR NULL DEFAULT NULL, + "start_date_type" VARCHAR NULL DEFAULT NULL, + "completion_date" VARCHAR NULL DEFAULT NULL, + 
"completion_date_type" VARCHAR NULL DEFAULT NULL, + "study_id" CHAR(36) NULL DEFAULT NULL, PRIMARY KEY ("id"), - CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY -("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "study_status_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); -- Dumping data for table public.study_status: 2 rows From 1eea783db9351d918e078fb6f5191364f280732c Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 13 Sep 2023 01:20:03 +0000 Subject: [PATCH 136/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_design.py | 1 - 1 file changed, 1 deletion(-) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 29faedeb..3b7f2875 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -47,4 +47,3 @@ def put(self, study_id: int): study_.study_design.update(request.json) db.session.commit() return study_.study_design.to_dict() - From 72549f128754c85b0c71065e4482c29e82c36662 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 13 Sep 2023 10:26:48 -0700 Subject: [PATCH 137/505] feat: all FK fields got NOT NULL in Model classes --- apis/dataset_metadata/dataset_access.py | 18 +++-------- apis/dataset_metadata/dataset_consent.py | 27 +++-------------- apis/dataset_metadata/dataset_date.py | 15 ++-------- .../dataset_de_ident_level.py | 17 ++--------- .../dataset_managing_organization.py | 19 ++---------- apis/dataset_metadata/dataset_other.py | 15 ++-------- apis/dataset_metadata/dataset_readme.py | 15 ++-------- apis/dataset_metadata/dataset_record_keys.py | 30 +++---------------- db-docker-compose.yaml | 2 +- model/dataset.py | 2 +- model/dataset_contributor.py | 2 +- model/dataset_metadata/dataset_access.py | 2 +- 
.../dataset_alternate_identifier.py | 2 +- model/dataset_metadata/dataset_consent.py | 2 +- .../dataset_contributor_affiliation.py | 2 +- model/dataset_metadata/dataset_date.py | 2 +- .../dataset_de_ident_level.py | 2 +- model/dataset_metadata/dataset_description.py | 2 +- model/dataset_metadata/dataset_funder.py | 2 +- .../dataset_managing_organization.py | 2 +- model/dataset_metadata/dataset_other.py | 2 +- model/dataset_metadata/dataset_readme.py | 2 +- model/dataset_metadata/dataset_record_keys.py | 2 +- .../dataset_metadata/dataset_related_item.py | 2 +- .../dataset_related_item_contributor.py | 2 +- .../dataset_related_item_identifier.py | 2 +- .../dataset_related_item_other.py | 2 +- .../dataset_related_item_title.py | 2 +- model/dataset_metadata/dataset_rights.py | 2 +- model/dataset_metadata/dataset_subject.py | 2 +- model/dataset_metadata/dataset_title.py | 2 +- model/participant.py | 2 +- model/study_metadata/study_arm.py | 2 +- model/study_metadata/study_available_ipd.py | 2 +- model/study_metadata/study_contact.py | 2 +- model/study_metadata/study_description.py | 2 +- model/study_metadata/study_design.py | 2 +- model/study_metadata/study_eligibility.py | 2 +- model/study_metadata/study_identification.py | 2 +- model/study_metadata/study_intervention.py | 2 +- model/study_metadata/study_ipdsharing.py | 2 +- model/study_metadata/study_link.py | 2 +- model/study_metadata/study_location.py | 2 +- model/study_metadata/study_other.py | 2 +- .../study_metadata/study_overall_official.py | 2 +- model/study_metadata/study_reference.py | 2 +- .../study_sponsors_collaborators.py | 2 +- model/study_metadata/study_status.py | 2 +- model/version.py | 2 +- 49 files changed, 68 insertions(+), 170 deletions(-) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index 2286876a..4e3350cd 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -29,19 +29,9 @@ def get(self, study_id: 
int, dataset_id: int): dataset_access_ = dataset_.dataset_access return [d.to_dict() for d in dataset_access_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_request_keys_ = DatasetAccess.from_data(data_obj, data) - db.session.add(dataset_request_keys_) + dataset_ = Dataset.query.get(dataset_id) + dataset_access_ = dataset_.dataset_access.update(request.json) db.session.commit() - return dataset_request_keys_.to_dict() - - @api.route("/study//dataset//metadata/access/") - class DatasetAccessUpdate(Resource): - def put(self, study_id: int, dataset_id: int, access_id: int): - data = request.json - dataset_access_ = DatasetAccess.query.get(access_id) - dataset_access_.update(request.json) - db.session.commit() - return dataset_access_.to_dict() + return dataset_access_.to_dict() diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 96d9f564..5acba9c3 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -31,28 +31,9 @@ def get(self, study_id: int, dataset_id: int): dataset_consent_ = dataset_.dataset_consent return [d.to_dict() for d in dataset_consent_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_consent_ = DatasetConsent.query.get(i["id"]) - if dataset_consent_ == None: - return f"Study link {i['id']} Id is not found", 404 - dataset_consent_.update(i) - list_of_elements.append(dataset_consent_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_consent_ = DatasetConsent.from_data(data_obj, i) - db.session.add(dataset_consent_) - list_of_elements.append(dataset_consent_.to_dict()) + dataset_ = Dataset.query.get(dataset_id) + dataset_consent_ = 
dataset_.dataset_consent.update(data) db.session.commit() - return list_of_elements - - @api.route("/study//dataset//metadata/consent/") - class DatasetAccessUpdate(Resource): - def put(self, study_id: int, dataset_id: int, consent_id: int): - dataset_consent_ = DatasetConsent.query.get(consent_id) - dataset_consent_.update(request.json) - db.session.commit() - return dataset_consent_.to_dict() + return dataset_consent_.to_dict() diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index afe8fe10..d05015c1 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -27,18 +27,9 @@ def get(self, study_id: int, dataset_id: int): dataset_date_ = dataset_.dataset_date return [d.to_dict() for d in dataset_date_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_date_ = DatasetDate.from_data(data_obj, data) - db.session.add(dataset_date_) + dataset_ = Dataset.query.get(dataset_id) + dataset_date_ = dataset_.dataset_date.update(data) db.session.commit() return dataset_date_.to_dict() - - @api.route("/study//dataset//metadata/date/") - class DatasetDateUpdate(Resource): - def put(self, study_id: int, dataset_id: int, date_id: int): - dataset_date_ = DatasetDate.query.get(date_id) - dataset_date_.update(request.json) - db.session.commit() - return dataset_date_.to_dict() diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 8526adcf..648a625c 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -31,20 +31,9 @@ def get(self, study_id: int, dataset_id: int): de_ident_level_ = dataset_.dataset_de_ident_level return [d.to_dict() for d in de_ident_level_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = 
request.json - data_obj = Dataset.query.get(dataset_id) - de_ident_level_ = DatasetDeIdentLevel.from_data(data_obj, data) - db.session.add(de_ident_level_) + dataset_ = Dataset.query.get(dataset_id) + de_ident_level_ = dataset_.dataset_de_ident_level.update(data) db.session.commit() return de_ident_level_.to_dict() - - @api.route( - "/study//dataset//metadata/de_ident_level/" - ) - class DatasetDatasetDeIdentLevelUpdate(Resource): - def put(self, study_id: int, dataset_id: int, de_ident_level_id: int): - de_ident_level_ = DatasetDeIdentLevel.query.get(de_ident_level_id) - de_ident_level_.update(request.json) - db.session.commit() - return de_ident_level_.to_dict() diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index c5ccc766..fd9d475a 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -27,22 +27,9 @@ def get(self, study_id: int, dataset_id: int): managing_organization_ = dataset_.dataset_managing_organization return [d.to_dict() for d in managing_organization_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - managing_organization_ = DatasetManagingOrganization.from_data(data_obj, data) - db.session.add(managing_organization_) + dataset_ = Dataset.query.get(dataset_id) + managing_organization_ = dataset_.dataset_managing_organization.update(request.json) db.session.commit() return managing_organization_.to_dict() - - @api.route( - "/study//dataset//metadata/managing_organization/" - ) - class DatasetManagingOrganizationUpdate(Resource): - def put(self, study_id: int, dataset_id: int, managing_organization_id: int): - managing_organization_ = DatasetManagingOrganization.query.get( - managing_organization_id - ) - managing_organization_.update(request.json) - db.session.commit() - return 
managing_organization_.to_dict() diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index ac3542ed..7d4748d7 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -30,18 +30,9 @@ def get(self, study_id: int, dataset_id: int): dataset_other_ = dataset_.dataset_other return [d.to_dict() for d in dataset_other_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_other_ = DatasetOther.from_data(data_obj, data) - db.session.add(dataset_other_) + dataset_ = Dataset.query.get(dataset_id) + dataset_other_ = dataset_.dataset_other.update(request.json) db.session.commit() return dataset_other_.to_dict() - - @api.route("/study//dataset//metadata/other/") - class DatasetOtherUpdate(Resource): - def put(self, study_id: int, dataset_id: int, other_id: int): - dataset_other_ = DatasetOther.query.get(other_id) - dataset_other_.update(request.json) - db.session.commit() - return dataset_other_.to_dict() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index ee344ee9..391f93cc 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -22,18 +22,9 @@ def get(self, study_id: int, dataset_id: int): dataset_readme_ = dataset_.dataset_readme return [d.to_dict() for d in dataset_readme_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_readme_ = DatasetReadme.from_data(data_obj, data) - db.session.add(dataset_readme_) + dataset_ = Dataset.query.get(dataset_id) + dataset_readme_ = dataset_.dataset_readme.update(data) db.session.commit() return dataset_readme_.to_dict() - - @api.route("/study//dataset//metadata/readme/") - class DatasetReadmeUpdate(Resource): - def put(self, 
study_id: int, dataset_id: int, readme_id: int): - dataset_readme_ = DatasetReadme.query.get(readme_id) - dataset_readme_.update(request.json) - db.session.commit() - return dataset_readme_.to_dict() diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index e0156fd2..7ce9ec2c 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -25,31 +25,9 @@ def get(self, study_id: int, dataset_id: int): dataset_record_keys_ = dataset_.dataset_record_keys return [d.to_dict() for d in dataset_record_keys_] - def post(self, study_id: int, dataset_id: int): + def put(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_record_keys_ = DatasetRecordKeys.query.get(i["id"]) - if dataset_record_keys_ == None: - return f"Study link {i['id']} Id is not found", 404 - dataset_record_keys_.update(i) - list_of_elements.append(dataset_record_keys_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_record_keys_ = DatasetRecordKeys.from_data(data_obj, i) - db.session.add(dataset_record_keys_) - list_of_elements.append(dataset_record_keys_.to_dict()) + dataset_ = Dataset.query.get(dataset_id) + dataset_record_keys_ = dataset_.dataset_de_ident_level.update(data) db.session.commit() - return list_of_elements - - @api.route( - "/study//dataset//metadata/record_keys/" - ) - class DatasetRecordKeysUpdate(Resource): - def put(self, study_id: int, dataset_id: int, record_key_id: int): - data = request.json - dataset_record_keys_ = DatasetRecordKeys.query.get(record_key_id) - dataset_record_keys_.update(request.json) - db.session.commit() - return dataset_record_keys_.to_dict() + return dataset_record_keys_.to_dict() diff --git a/db-docker-compose.yaml b/db-docker-compose.yaml index 98ee777f..eaabcefb 100644 --- a/db-docker-compose.yaml +++ 
b/db-docker-compose.yaml @@ -9,7 +9,7 @@ services: POSTGRES_DB: fairhub_local ports: - 5432:5432 - volumes: +# volumes: # - ./postgres-data:/var/lib/postgresql/data # - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql # pgadmin: diff --git a/model/dataset.py b/model/dataset.py index 979d21ee..947ee087 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -19,7 +19,7 @@ def __init__(self, study): updated_on = db.Column(db.BigInteger, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="dataset") dataset_contributors = db.relationship( diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py index a19d52a0..2f92b2e4 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -17,7 +17,7 @@ def __init__(self): creator = db.Column(db.BOOLEAN, nullable=False) contributor_type = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_contributors") dataset_contributor_affiliation = db.relationship( "DatasetContributorAffiliation", back_populates="dataset_contributors" diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 6507cf8a..0a183179 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -14,7 +14,7 @@ def __init__(self, dataset): url = db.Column(db.String, nullable=False) url_last_checked = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", 
back_populates="dataset_access") def to_dict(self): diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index 47aa7068..9a51f15c 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -12,7 +12,7 @@ def __init__(self, dataset): identifier = db.Column(db.String, nullable=False) identifier_type = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_alternate_identifier") def to_dict(self): diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 6ac8c6e7..38f7fd16 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -18,7 +18,7 @@ def __init__(self, dataset): no_methods = db.Column(db.BOOLEAN, nullable=False) details = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_consent") def to_dict(self): diff --git a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index eb9dff57..be0e02c8 100644 --- a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -16,7 +16,7 @@ def __init__(self, dataset): "DatasetContributor", back_populates="dataset_contributor_affiliation" ) dataset_contributor_id = db.Column( - db.String, db.ForeignKey("dataset_contributor.id") + db.String, db.ForeignKey("dataset_contributor.id"), nullable=False ) def to_dict(self): diff --git a/model/dataset_metadata/dataset_date.py 
b/model/dataset_metadata/dataset_date.py index a46127bb..21054f3e 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -13,7 +13,7 @@ def __init__(self, dataset): date_type = db.Column(db.String, nullable=False) data_information = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_date") def to_dict(self): diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index 53bf32e9..566a212b 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -18,7 +18,7 @@ def __init__(self, dataset): k_anon = db.Column(db.BOOLEAN, nullable=False) details = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_de_ident_level") def to_dict(self): diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index 65eb2f96..ce3a056d 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -12,7 +12,7 @@ def __init__(self, dataset): description = db.Column(db.String, nullable=False) description_type = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_description") def to_dict(self): diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index fd0206b0..f1375971 100644 --- 
a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -17,7 +17,7 @@ def __init__(self, dataset): award_uri = db.Column(db.String, nullable=False) award_title = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_funder") def to_dict(self): diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py index 80eae34c..4e8b786b 100644 --- a/model/dataset_metadata/dataset_managing_organization.py +++ b/model/dataset_metadata/dataset_managing_organization.py @@ -13,7 +13,7 @@ def __init__(self, dataset): name = db.Column(db.String, nullable=False) ror_id = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_managing_organization") def to_dict(self): diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index f33d24a5..f8409b2a 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -19,7 +19,7 @@ def __init__(self, dataset): standards_followed = db.Column(db.String, nullable=False) acknowledgement = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_other") def to_dict(self): diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index 3e815908..b3c0594c 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -11,7 
+11,7 @@ def __init__(self, dataset): id = db.Column(db.CHAR(36), primary_key=True) content = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_readme") def to_dict(self): diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index d84d1447..4d6c48b0 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -12,7 +12,7 @@ def __init__(self, dataset): key_type = db.Column(db.String, nullable=False) key_details = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_record_keys") def to_dict(self): diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index a42580af..4007888a 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -12,7 +12,7 @@ def __init__(self, dataset): type = db.Column(db.String, nullable=False) relation_type = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_related_item") dataset_related_item_contributor = db.relationship( "DatasetRelatedItemContributor", back_populates="dataset_related_item" diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index 4a7388ec..33bedbcf 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ 
b/model/dataset_metadata/dataset_related_item_contributor.py @@ -15,7 +15,7 @@ def __init__(self, dataset): contributor_type = db.Column(db.String, nullable=False) dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id") + db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_contributor" diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 7cc33d96..390fd4fa 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -15,7 +15,7 @@ def __init__(self): scheme_type = db.Column(db.String, nullable=False) dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id") + db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_identifier" diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py index 0312985e..b7910694 100644 --- a/model/dataset_metadata/dataset_related_item_other.py +++ b/model/dataset_metadata/dataset_related_item_other.py @@ -20,7 +20,7 @@ def __init__(self, dataset): edition = db.Column(db.String, nullable=False) dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id") + db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_other" diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 92efeb4e..79e23a64 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ 
b/model/dataset_metadata/dataset_related_item_title.py @@ -13,7 +13,7 @@ def __init__(self, dataset): title = db.Column(db.String, nullable=False) dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id") + db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_title" diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index da63b24c..b8cb70b2 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -15,7 +15,7 @@ def __init__(self, dataset): identifier = db.Column(db.String, nullable=False) identifier_scheme = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_rights") def to_dict(self): diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index e42c61c4..b62342be 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -16,7 +16,7 @@ def __init__(self, dataset): value_uri = db.Column(db.String, nullable=False) classification_code = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_subject") def to_dict(self): diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index f6e367e9..739b5253 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -13,7 +13,7 @@ def __init__(self, dataset): type = db.Column(db.String, nullable=False) dataset = 
db.relationship("Dataset", back_populates="dataset_title") - dataset_id = db.Column(db.String, db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.String, db.ForeignKey("dataset.id"), nullable=False) def to_dict(self): return { diff --git a/model/participant.py b/model/participant.py index 95de0e90..37a1671a 100644 --- a/model/participant.py +++ b/model/participant.py @@ -20,7 +20,7 @@ def __init__(self, study): created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="participants") dataset_versions = db.relationship( "Version", diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index d6188ff1..50d64289 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -24,7 +24,7 @@ def __init__(self, study): intervention_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_arm") def to_dict(self): diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index d955ae3b..5bec4b7a 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -22,7 +22,7 @@ def __init__(self, study): comment = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_available_ipd") def to_dict(self): diff --git 
a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index bdcc507d..de5492f9 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -26,7 +26,7 @@ def __init__(self, study, role, central_contact): central_contact = db.Column(db.BOOLEAN, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_contact") def to_dict(self): diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index cad0558e..daf56895 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -17,7 +17,7 @@ def __init__(self, study): brief_summary = db.Column(db.String, nullable=False) detailed_description = db.Column(db.String, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_description") def to_dict(self): diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index f265928d..55ca0480 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -53,7 +53,7 @@ def __init__(self, study): target_duration = db.Column(db.String, nullable=True) number_groups_cohorts = db.Column(db.Integer, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=True) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_design") def to_dict(self): diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 05591d71..d9c4a4f7 100644 --- 
a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -39,7 +39,7 @@ def __init__(self, study): study_population = db.Column(db.String, nullable=True) sampling_method = db.Column(db.String, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_eligibility") def to_dict(self): diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 67485de2..7c0dafae 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -20,7 +20,7 @@ def __init__(self, study, secondary): secondary = db.Column(db.BOOLEAN, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_identification") def to_dict(self): diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 68569a68..11e18bd9 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -24,7 +24,7 @@ def __init__(self, study): other_name_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_intervention") def to_dict(self): diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index f7d58b93..12dda9f8 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -27,7 +27,7 @@ def __init__(self, study): 
ipd_sharing_access_criteria = db.Column(db.String, nullable=False) ipd_sharing_url = db.Column(db.String, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_ipdsharing") def to_dict(self): diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index bf46e638..8fe82a08 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -19,7 +19,7 @@ def __init__(self, study): title = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_link") def to_dict(self): diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 925980cd..cc995302 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -23,7 +23,7 @@ def __init__(self, study): country = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_location") def to_dict(self): diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 7b948442..ced816ee 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -24,7 +24,7 @@ def __init__(self, study): keywords = db.Column(ARRAY(String), nullable=False) size = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) 
study = db.relationship("Study", back_populates="study_other") def to_dict(self): diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index f3df7f1d..d302b6c8 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -20,7 +20,7 @@ def __init__(self, study): role = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_overall_official") def to_dict(self): diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 8804420a..a39193bc 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -21,7 +21,7 @@ def __init__(self, study): citation = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_reference") def to_dict(self): diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index b781b912..c3a6e853 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -27,7 +27,7 @@ def __init__(self, study): lead_sponsor_name = db.Column(db.String, nullable=False) collaborator_name = db.Column(ARRAY(String), nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_sponsors_collaborators") def to_dict(self): diff --git 
a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index e08734d0..5ff397af 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -26,7 +26,7 @@ def __init__(self, study): completion_date = db.Column(db.String, nullable=True) completion_date_type = db.Column(db.String, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id")) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) study = db.relationship("Study", back_populates="study_status") def to_dict(self): diff --git a/model/version.py b/model/version.py index b7240706..ed7eb3ca 100644 --- a/model/version.py +++ b/model/version.py @@ -28,7 +28,7 @@ def __init__(self, dataset): created_at = db.Column(db.BigInteger, nullable=False) published_on = db.Column(db.BigInteger, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id")) + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_versions") participants = db.relationship("Participant", secondary=version_participants) From d117513bd4c37724f12bc41a96905f5af8f7ee3a Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 13 Sep 2023 23:07:46 -0700 Subject: [PATCH 138/505] fix: reverse order is removed from study metadata one-to-many rel.s --- apis/invited_contributor.py | 32 ++++++++++++++++--- apis/study_metadata/study_available_ipd.py | 3 +- apis/study_metadata/study_contact.py | 3 +- apis/study_metadata/study_intervention.py | 3 +- apis/study_metadata/study_link.py | 3 +- apis/study_metadata/study_location.py | 3 +- apis/study_metadata/study_overall_official.py | 4 +-- apis/study_metadata/study_reference.py | 2 +- model/study_metadata/arm.py | 2 +- model/study_metadata/identifiers.py | 3 +- 10 files changed, 37 insertions(+), 21 deletions(-) diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index bd224f2d..48db9b77 100644 
--- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -1,6 +1,6 @@ from flask_restx import Namespace, Resource, fields - -from model import StudyInvitedContributor, Study +from flask import request +from model import StudyInvitedContributor, Study, db, User api = Namespace("contributor", description="contributors", path="/") @@ -15,11 +15,33 @@ ) -@api.route("/study//contributor") +@api.route("/study//invited_contributor") class AddParticipant(Resource): @api.doc("invited contributor") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(contributors_model) - def post(self, study_id: int, invited_contributor_id: int): - invited_contributors = Study.query.get(invited_contributor_id) + def post(self, study_id: int, study, email_address: str, permission: str): + study_obj = Study.query.get(study_id) + user = User.query.get(email_address=email_address) + # User exists + if user: + add_user_to_study(study, user, permission) + return "Invitation is sent" + elif not user: + invite_user_to_study(study, user, permission) + db.session.commit() + + +def add_user_to_study(study, user, permission): + pass + + +def invite_user_to_study(study, user, permission): + pass + + + +# if study_obj.invited_contributors.email_address not in study_obj.study_contributors.user.email_address: +# add_invited_contributor = StudyInvitedContributor.from_data(study_obj, request.json) +# db.session.add(add_invited_contributor) \ No newline at end of file diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 6f252e3f..25a03f47 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -26,8 +26,7 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_available_ipd_ = study_.study_available_ipd sorted_study_available_ipd = sorted( - study_available_ipd_, key=lambda x: x.created_at, reverse=True - ) + study_available_ipd_, 
key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_available_ipd] @api.doc("update available") diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index e9e8c6ab..7b7b44d6 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -28,8 +28,7 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_contact_ = study_.study_contact sorted_study_contact = sorted( - study_contact_, key=lambda x: x.created_at, reverse=True - ) + study_contact_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_contact if s.central_contact] def post(self, study_id: int): diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index e1c43136..eff9e63d 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -30,8 +30,7 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_intervention_ = study_.study_intervention sorted_study_intervention = sorted( - study_intervention_, key=lambda x: x.created_at, reverse=True - ) + study_intervention_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_intervention] def post(self, study_id: int): diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index c37c4525..750f6ffb 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -27,8 +27,7 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_link_ = study_.study_link sorted_study_link_ = sorted( - study_link_, key=lambda x: x.created_at, reverse=True - ) + study_link_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_link_] def post(self, study_id: int): diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 87e8e824..294ea902 100644 --- a/apis/study_metadata/study_location.py +++ 
b/apis/study_metadata/study_location.py @@ -31,8 +31,7 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_location_ = study_.study_location sorted_study_location = sorted( - study_location_, key=lambda x: x.created_at, reverse=True - ) + study_location_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_location] def post(self, study_id: int): diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 81095f60..197f3186 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -27,9 +27,9 @@ class StudyOverallOfficialResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_overall_official_ = study_.study_overall_official - # sorted_by_date = sorted([i.created_at for i in study_overall_official_], reverse=True) + # sorted_by_date = sorted([i.created_at for i in study_overall_official_]) sorted_study_overall = sorted( - study_overall_official_, key=lambda x: x.created_at, reverse=True + study_overall_official_, key=lambda x: x.created_at ) return [i.to_dict() for i in sorted_study_overall] diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 49b1b386..1ed9806e 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -30,7 +30,7 @@ def get(self, study_id: int): study_reference_ = study_.study_reference print(study_.study_reference) sorted_study_reference = sorted( - study_reference_, key=lambda x: x.created_at, reverse=True + study_reference_, key=lambda x: x.created_at ) return [s.to_dict() for s in sorted_study_reference] diff --git a/model/study_metadata/arm.py b/model/study_metadata/arm.py index 8b4e0c97..9c4100f4 100644 --- a/model/study_metadata/arm.py +++ b/model/study_metadata/arm.py @@ -9,7 +9,7 @@ def __init__(self, study: Study): def to_dict(self): sorted_study_arms = sorted( - 
self.study.study_arm, key=lambda arm: arm.created_at, reverse=True + self.study.study_arm, key=lambda arm: arm.created_at ) return { "arms": [arm.to_dict() for arm in sorted_study_arms], diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 07482c8b..7d2bbda5 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -10,8 +10,7 @@ def __init__(self, study: Study): def to_dict(self): sorted_study_identifications = sorted( self.study.study_identification, - key=lambda identifier: identifier.created_at, - reverse=True, + key=lambda identifier: identifier.created_at ) return { "primary": [ From 8aa63794c8780d271cd2bf382dad0907898f9c91 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 14 Sep 2023 06:09:10 +0000 Subject: [PATCH 139/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dataset_metadata/dataset_managing_organization.py | 4 +++- apis/invited_contributor.py | 4 ++-- apis/study_metadata/study_available_ipd.py | 3 ++- apis/study_metadata/study_contact.py | 3 +-- apis/study_metadata/study_intervention.py | 3 ++- apis/study_metadata/study_link.py | 3 +-- apis/study_metadata/study_location.py | 3 +-- apis/study_metadata/study_reference.py | 4 +--- model/study_metadata/arm.py | 4 +--- model/study_metadata/identifiers.py | 2 +- 10 files changed, 15 insertions(+), 18 deletions(-) diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index fd9d475a..9c2a4226 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -30,6 +30,8 @@ def get(self, study_id: int, dataset_id: int): def put(self, study_id: int, dataset_id: int): data = request.json dataset_ = Dataset.query.get(dataset_id) - managing_organization_ = 
dataset_.dataset_managing_organization.update(request.json) + managing_organization_ = dataset_.dataset_managing_organization.update( + request.json + ) db.session.commit() return managing_organization_.to_dict() diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 48db9b77..c185f922 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -30,6 +30,7 @@ def post(self, study_id: int, study, email_address: str, permission: str): return "Invitation is sent" elif not user: invite_user_to_study(study, user, permission) + db.session.commit() @@ -41,7 +42,6 @@ def invite_user_to_study(study, user, permission): pass - # if study_obj.invited_contributors.email_address not in study_obj.study_contributors.user.email_address: # add_invited_contributor = StudyInvitedContributor.from_data(study_obj, request.json) -# db.session.add(add_invited_contributor) \ No newline at end of file +# db.session.add(add_invited_contributor) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 25a03f47..04318f60 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -26,7 +26,8 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_available_ipd_ = study_.study_available_ipd sorted_study_available_ipd = sorted( - study_available_ipd_, key=lambda x: x.created_at) + study_available_ipd_, key=lambda x: x.created_at + ) return [s.to_dict() for s in sorted_study_available_ipd] @api.doc("update available") diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 7b7b44d6..d9779449 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -27,8 +27,7 @@ class StudyContactResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_contact_ = study_.study_contact - sorted_study_contact = sorted( - study_contact_, key=lambda x: 
x.created_at) + sorted_study_contact = sorted(study_contact_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_contact if s.central_contact] def post(self, study_id: int): diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index eff9e63d..4112ac77 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -30,7 +30,8 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_intervention_ = study_.study_intervention sorted_study_intervention = sorted( - study_intervention_, key=lambda x: x.created_at) + study_intervention_, key=lambda x: x.created_at + ) return [s.to_dict() for s in sorted_study_intervention] def post(self, study_id: int): diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 750f6ffb..ddb6f245 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -26,8 +26,7 @@ class StudyLinkResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_link_ = study_.study_link - sorted_study_link_ = sorted( - study_link_, key=lambda x: x.created_at) + sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_link_] def post(self, study_id: int): diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 294ea902..b6f6cdb0 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -30,8 +30,7 @@ class StudyLocationResource(Resource): def get(self, study_id: int): study_ = Study.query.get(study_id) study_location_ = study_.study_location - sorted_study_location = sorted( - study_location_, key=lambda x: x.created_at) + sorted_study_location = sorted(study_location_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_location] def post(self, study_id: int): diff --git 
a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 1ed9806e..e04ce8a5 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -29,9 +29,7 @@ def get(self, study_id: int): study_ = Study.query.get(study_id) study_reference_ = study_.study_reference print(study_.study_reference) - sorted_study_reference = sorted( - study_reference_, key=lambda x: x.created_at - ) + sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_reference] def post(self, study_id: int): diff --git a/model/study_metadata/arm.py b/model/study_metadata/arm.py index 9c4100f4..c165a9d5 100644 --- a/model/study_metadata/arm.py +++ b/model/study_metadata/arm.py @@ -8,9 +8,7 @@ def __init__(self, study: Study): study: Study def to_dict(self): - sorted_study_arms = sorted( - self.study.study_arm, key=lambda arm: arm.created_at - ) + sorted_study_arms = sorted(self.study.study_arm, key=lambda arm: arm.created_at) return { "arms": [arm.to_dict() for arm in sorted_study_arms], "study_type": self.study.study_design.study_type, diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 7d2bbda5..746afd50 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -10,7 +10,7 @@ def __init__(self, study: Study): def to_dict(self): sorted_study_identifications = sorted( self.study.study_identification, - key=lambda identifier: identifier.created_at + key=lambda identifier: identifier.created_at, ) return { "primary": [ From ab502e1fce34c7665b1a9c91a2efbb2ada5789b4 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 14 Sep 2023 23:03:15 -0700 Subject: [PATCH 140/505] wip: invited_contributor POST endpoint --- apis/__init__.py | 4 ++ apis/dataset_metadata/dataset_description.py | 18 +++++--- apis/dataset_metadata/dataset_title.py | 24 +++++++--- apis/invited_contributor.py | 46 ++++++++------------ 
app.py | 2 +- model/__init__.py | 2 +- model/invited_study_contributor.py | 16 +++---- model/study.py | 19 +++++++- model/study_contributor.py | 14 ++---- 9 files changed, 82 insertions(+), 63 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 47090630..80671eeb 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -5,9 +5,11 @@ from apis.study_metadata_namespace import api as study_metadata_namespace from .contributor import api as contributors_api + from .dataset import api as dataset_api from .participant import api as participants_api from .study import api as study_api +from .invited_contributor import api as invited_contributors from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd @@ -77,3 +79,5 @@ def get(self): api.add_namespace(dataset_api) api.add_namespace(participants_api) api.add_namespace(contributors_api) +api.add_namespace(invited_contributors) + diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 473e0c38..bc9059a3 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -30,17 +30,25 @@ def get(self, study_id: int, dataset_id: int): def post(self, study_id: int, dataset_id: int): data = request.json data_obj = Dataset.query.get(dataset_id) - dataset_description_ = DatasetDescription.from_data(data_obj, data) - db.session.add(dataset_description_) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_description_ = DatasetDescription.query.get(i["id"]) + dataset_description_.update(i) + list_of_elements.append(dataset_description_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_description_ = DatasetDescription.from_data(data_obj, i) + db.session.add(dataset_description_) + list_of_elements.append(dataset_description_.to_dict()) db.session.commit() - return dataset_description_.to_dict() + return list_of_elements 
@api.route( "/study//dataset//metadata/description/" ) class DatasetDescriptionUpdate(Resource): - def put(self, study_id: int, dataset_id: int, description_id: int): + def delete(self, study_id: int, dataset_id: int, description_id: int): dataset_description_ = DatasetDescription.query.get(description_id) - dataset_description_.update(request.json) + db.session.delete(dataset_description_) db.session.commit() return dataset_description_.to_dict() diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 016341c7..2fe5d880 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -31,15 +31,25 @@ def get(self, study_id: int, dataset_id: int): def post(self, study_id: int, dataset_id: int): data = request.json data_obj = Dataset.query.get(dataset_id) - dataset_title_ = DatasetTitle.from_data(data_obj, data) - db.session.add(dataset_title_) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_title_ = DatasetTitle.query.get(i["id"]) + dataset_title_.update(i) + list_of_elements.append(dataset_title_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_title_ = DatasetTitle.from_data(data_obj, i) + db.session.add(dataset_title_) + list_of_elements.append(dataset_title_.to_dict()) db.session.commit() - return dataset_title_.to_dict() + return list_of_elements - @api.route("/study//dataset//metadata/title/") - class DatasetTitleUpdate(Resource): - def put(self, study_id: int, dataset_id: int, title_id: int): + @api.route( + "/study//dataset//metadata/title/" + ) + class DatasetDescriptionUpdate(Resource): + def delete(self, study_id: int, dataset_id: int, title_id: int): dataset_title_ = DatasetTitle.query.get(title_id) - dataset_title_.update(request.json) + db.session.delete(dataset_title_) db.session.commit() return dataset_title_.to_dict() diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 48db9b77..29b4808b 100644 --- 
a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -1,12 +1,11 @@ from flask_restx import Namespace, Resource, fields +from model import StudyInvitedContributor, Study, db, User, StudyContributor from flask import request -from model import StudyInvitedContributor, Study, db, User - -api = Namespace("contributor", description="contributors", path="/") +api = Namespace("invited_contributors", description="invited contributors", path="/") contributors_model = api.model( - "Version", + "InvitedContributor", { "user_id": fields.String(required=True), "permission": fields.String(required=True), @@ -15,33 +14,26 @@ ) -@api.route("/study//invited_contributor") -class AddParticipant(Resource): +@api.route("/study//invited-contributor") +class AddInvitedContributor(Resource): @api.doc("invited contributor") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(contributors_model) - def post(self, study_id: int, study, email_address: str, permission: str): + # @api.marshal_with(contributors_model) + def post(self, study_id: int): + # try: study_obj = Study.query.get(study_id) - user = User.query.get(email_address=email_address) - # User exists + data = request.json + email_address = data["email_address"] + user = User.query.filter_by(email_address=email_address).first() + permission = data["permission"] if user: - add_user_to_study(study, user, permission) - return "Invitation is sent" - elif not user: - invite_user_to_study(study, user, permission) - db.session.commit() - - -def add_user_to_study(study, user, permission): - pass - - -def invite_user_to_study(study, user, permission): - pass - + study_obj.add_user_to_study(user, permission) + elif not user: + study_obj.invite_user_to_study(email_address, permission) + print("User successfully saved") + db.session.commit() + # except: + # print("error occured", 422) -# if study_obj.invited_contributors.email_address not in study_obj.study_contributors.user.email_address: -# 
add_invited_contributor = StudyInvitedContributor.from_data(study_obj, request.json) -# db.session.add(add_invited_contributor) \ No newline at end of file diff --git a/app.py b/app.py index c2a0eaf1..09d67423 100644 --- a/app.py +++ b/app.py @@ -69,7 +69,7 @@ def destroy_schema(): metadata = MetaData() metadata.reflect(bind=engine) table_names = [table.name for table in metadata.tables.values()] - print(table_names) + # print(table_names) if len(table_names) == 0: with engine.begin() as conn: """Create the database schema.""" diff --git a/model/__init__.py b/model/__init__.py index 041cfb11..b66647f8 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -68,7 +68,6 @@ "db", "User", "DatasetContributor", - "StudyInvitedContributor", "StudyContributor", "DatasetOther", "DatasetAccess", @@ -109,4 +108,5 @@ "StudyStatus", "Identifiers", "Arm", + "StudyInvitedContributor" ] diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index f062f78a..70d2445b 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -2,12 +2,15 @@ from datetime import datetime from .db import db import datetime +from datetime import timezone class StudyInvitedContributor(db.Model): - def __init__(self): + def __init__(self, study, user, permission): self.id = str(uuid.uuid4()) - + self.study = study + self.user = user + self.permission = permission __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) @@ -22,12 +25,3 @@ def to_dict(self): "permission": self.permission, "invited_on": self.invited_on, } - - @staticmethod - def from_data(data: dict): - invited_contributor = StudyInvitedContributor() - invited_contributor.email_address = data["email_address"] - invited_contributor.permission = data["permission"] - invited_contributor.invited_on = datetime.datetime.now(timezone.utc).timestamp() - - return 
invited_contributor diff --git a/model/study.py b/model/study.py index b6a4ef64..fff1d612 100644 --- a/model/study.py +++ b/model/study.py @@ -30,7 +30,7 @@ def __init__(self): updated_on = db.Column(db.BigInteger, nullable=False) dataset = db.relationship("Dataset", back_populates="study") - study_contributors = db.relationship("StudyContributor", back_populates="study") + study_contributors = db.relationship("StudyContributor", back_populates="study", lazy="dynamic") participants = db.relationship("Participant", back_populates="study") invited_contributors = db.relationship( "StudyInvitedContributor", back_populates="study" @@ -105,3 +105,20 @@ def validate(self): def touch(self): self.updated_on = datetime.datetime.now(timezone.utc).timestamp() + + def add_user_to_study(self, user, permission): + contributor = self.study_contributors.filter(model.StudyContributor.user_id == user.id) + if contributor: + raise Exception("User is already a contributor in study") + else: + contributor = model.StudyContributor(self, user, permission) + db.session.add(contributor) + + def invite_user_to_study(self, email_address, permission): + invited_contributor = self.invited_contributors.filter_by(email_address=email_address).one_or_none() + if invited_contributor: + raise Exception("User is already a contributor in study") + else: + contributor_add = model.StudyInvitedContributor(self, email_address, permission) + db.session.add(contributor_add) + diff --git a/model/study_contributor.py b/model/study_contributor.py index 9ba13b88..cefdd1b0 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -4,9 +4,11 @@ class StudyContributor(db.Model): - def __init__(self): + def __init__(self, study, user, permission): self.id = str(uuid.uuid4()) - + self.study = study + self.user = user + self.permission = permission __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), 
primary_key=True) @@ -24,11 +26,3 @@ def to_dict(self): "user_id": self.user_id, "study_id": self.study_id, } - - @staticmethod - def from_data(data: dict): - study_contributor = StudyContributor() - study_contributor.permission = data["permission"] - study_contributor.user_id = data["user_id"] - study_contributor.study_id = data["study_id"] - return study_contributor From 430c867d4ced15ea962c185b8b740849b43e11e6 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 15 Sep 2023 06:04:48 +0000 Subject: [PATCH 141/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 1 - apis/dataset_metadata/dataset_title.py | 4 +--- apis/invited_contributor.py | 2 +- model/__init__.py | 2 +- model/invited_study_contributor.py | 1 + model/study.py | 17 ++++++++++++----- model/study_contributor.py | 1 + 7 files changed, 17 insertions(+), 11 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 80671eeb..5ae384ac 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -80,4 +80,3 @@ def get(self): api.add_namespace(participants_api) api.add_namespace(contributors_api) api.add_namespace(invited_contributors) - diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 2fe5d880..3e8e8291 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -44,9 +44,7 @@ def post(self, study_id: int, dataset_id: int): db.session.commit() return list_of_elements - @api.route( - "/study//dataset//metadata/title/" - ) + @api.route("/study//dataset//metadata/title/") class DatasetDescriptionUpdate(Resource): def delete(self, study_id: int, dataset_id: int, title_id: int): dataset_title_ = DatasetTitle.query.get(title_id) diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 96bf7b6e..6328e59b 100644 --- a/apis/invited_contributor.py 
+++ b/apis/invited_contributor.py @@ -1,6 +1,7 @@ from flask_restx import Namespace, Resource, fields from model import StudyInvitedContributor, Study, db, User, StudyContributor from flask import request + api = Namespace("invited_contributors", description="invited contributors", path="/") @@ -38,7 +39,6 @@ def post(self, study_id: int): # print("error occured", 422) - def invite_user_to_study(study, user, permission): pass diff --git a/model/__init__.py b/model/__init__.py index b66647f8..de1087d1 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -108,5 +108,5 @@ "StudyStatus", "Identifiers", "Arm", - "StudyInvitedContributor" + "StudyInvitedContributor", ] diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 70d2445b..5a474c19 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -11,6 +11,7 @@ def __init__(self, study, user, permission): self.study = study self.user = user self.permission = permission + __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) diff --git a/model/study.py b/model/study.py index fff1d612..c3a50c19 100644 --- a/model/study.py +++ b/model/study.py @@ -30,7 +30,9 @@ def __init__(self): updated_on = db.Column(db.BigInteger, nullable=False) dataset = db.relationship("Dataset", back_populates="study") - study_contributors = db.relationship("StudyContributor", back_populates="study", lazy="dynamic") + study_contributors = db.relationship( + "StudyContributor", back_populates="study", lazy="dynamic" + ) participants = db.relationship("Participant", back_populates="study") invited_contributors = db.relationship( "StudyInvitedContributor", back_populates="study" @@ -107,7 +109,9 @@ def touch(self): self.updated_on = datetime.datetime.now(timezone.utc).timestamp() def add_user_to_study(self, user, permission): - contributor = 
self.study_contributors.filter(model.StudyContributor.user_id == user.id) + contributor = self.study_contributors.filter( + model.StudyContributor.user_id == user.id + ) if contributor: raise Exception("User is already a contributor in study") else: @@ -115,10 +119,13 @@ def add_user_to_study(self, user, permission): db.session.add(contributor) def invite_user_to_study(self, email_address, permission): - invited_contributor = self.invited_contributors.filter_by(email_address=email_address).one_or_none() + invited_contributor = self.invited_contributors.filter_by( + email_address=email_address + ).one_or_none() if invited_contributor: raise Exception("User is already a contributor in study") else: - contributor_add = model.StudyInvitedContributor(self, email_address, permission) + contributor_add = model.StudyInvitedContributor( + self, email_address, permission + ) db.session.add(contributor_add) - diff --git a/model/study_contributor.py b/model/study_contributor.py index cefdd1b0..3307e6c9 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -9,6 +9,7 @@ def __init__(self, study, user, permission): self.study = study self.user = user self.permission = permission + __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), primary_key=True) From 78bf1c742558293dbdab537424bcef62b3422b52 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 17 Sep 2023 21:05:02 -0700 Subject: [PATCH 142/505] feat: added invited_contributor POST endpoint --- apis/invited_contributor.py | 28 ++++++++++------------------ model/__init__.py | 5 +++-- model/invited_study_contributor.py | 8 +++++--- model/study.py | 17 +++++++++++------ 4 files changed, 29 insertions(+), 29 deletions(-) diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 96bf7b6e..038a4fa6 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -1,5 +1,5 @@ from flask_restx 
import Namespace, Resource, fields -from model import StudyInvitedContributor, Study, db, User, StudyContributor +from model import StudyInvitedContributor, Study, db, User, StudyContributor, StudyException from flask import request api = Namespace("invited_contributors", description="invited contributors", path="/") @@ -27,22 +27,14 @@ def post(self, study_id: int): email_address = data["email_address"] user = User.query.filter_by(email_address=email_address).first() permission = data["permission"] - if user: - study_obj.add_user_to_study(user, permission) - - elif not user: - study_obj.invite_user_to_study(email_address, permission) - print("User successfully saved") + contributor_ = None + try: + if user: + contributor_ = study_obj.add_user_to_study(user, permission) + else: + contributor_ = study_obj.invite_user_to_study(email_address, permission) + except StudyException as ex: + return ex.args[0], 409 db.session.commit() - # except: - # print("error occured", 422) - - - -def invite_user_to_study(study, user, permission): - pass - + return contributor_.to_dict(), 201 -# if study_obj.invited_contributors.email_address not in study_obj.study_contributors.user.email_address: -# add_invited_contributor = StudyInvitedContributor.from_data(study_obj, request.json) -# db.session.add(add_invited_contributor) diff --git a/model/__init__.py b/model/__init__.py index b66647f8..c2ff3834 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -3,7 +3,7 @@ from .db import db from .participant import Participant -from .study import Study +from .study import Study, StudyException from .user import User from .dataset import Dataset @@ -108,5 +108,6 @@ "StudyStatus", "Identifiers", "Arm", - "StudyInvitedContributor" + "StudyInvitedContributor", + "StudyException" ] diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 70d2445b..c4f79dfe 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -6,11 +6,12 
@@ class StudyInvitedContributor(db.Model): - def __init__(self, study, user, permission): + def __init__(self, study, email_address, permission): self.id = str(uuid.uuid4()) self.study = study - self.user = user self.permission = permission + self.invited_on = datetime.datetime.now(timezone.utc).timestamp() + self.email_address = email_address __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) @@ -21,7 +22,8 @@ def __init__(self, study, user, permission): def to_dict(self): return { - "email_address": self.id, + "study_id": self.study.id, + "email_address": self.email_address, "permission": self.permission, "invited_on": self.invited_on, } diff --git a/model/study.py b/model/study.py index fff1d612..e2411355 100644 --- a/model/study.py +++ b/model/study.py @@ -6,6 +6,10 @@ import datetime +class StudyException(Exception): + pass + + class Study(db.Model): """A study is a collection of datasets and participants""" @@ -33,7 +37,7 @@ def __init__(self): study_contributors = db.relationship("StudyContributor", back_populates="study", lazy="dynamic") participants = db.relationship("Participant", back_populates="study") invited_contributors = db.relationship( - "StudyInvitedContributor", back_populates="study" + "StudyInvitedContributor", back_populates="study", lazy="dynamic" ) study_arm = db.relationship("StudyArm", back_populates="study") @@ -109,16 +113,17 @@ def touch(self): def add_user_to_study(self, user, permission): contributor = self.study_contributors.filter(model.StudyContributor.user_id == user.id) if contributor: - raise Exception("User is already a contributor in study") + raise StudyException("User is already exists in study") else: contributor = model.StudyContributor(self, user, permission) db.session.add(contributor) + return contributor def invite_user_to_study(self, email_address, permission): - invited_contributor = 
self.invited_contributors.filter_by(email_address=email_address).one_or_none() + invited_contributor = self.invited_contributors.filter(model.StudyInvitedContributor.email_address == email_address).one_or_none() if invited_contributor: - raise Exception("User is already a contributor in study") + raise StudyException("This email address has already been invited to this study") else: contributor_add = model.StudyInvitedContributor(self, email_address, permission) - db.session.add(contributor_add) - + db.session.add(contributor_add) + return contributor_add From 143fd2486bbe6e75cece52586a6b20ff5d3f9f4d Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 18 Sep 2023 08:56:24 -0700 Subject: [PATCH 143/505] wip: user authorization --- apis/login.py | 41 ++++++++++++++++++++++++++++++++++++++++ apis/signup_user.py | 46 +++++++++++++++++++++++++++++++++++++++++++++ model/user.py | 40 +++++++++++++++++++++++++-------------- 3 files changed, 113 insertions(+), 14 deletions(-) create mode 100644 apis/login.py create mode 100644 apis/signup_user.py diff --git a/apis/login.py b/apis/login.py new file mode 100644 index 00000000..7b8bf3c7 --- /dev/null +++ b/apis/login.py @@ -0,0 +1,41 @@ +from flask import Response, jsonify, request +from flask_restx import Namespace, Resource, fields + +from model import Participant, Study, db + +api = Namespace("participant", description="participant operations", path="/") + +login_model = api.model( + "Login", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "created_at": fields.String(required=True), + "updated_on": fields.String(required=True), + "address": fields.String(required=True), + "age": fields.String(required=True), + }, +) + + +@api.route("/login") +class Login(Resource): + @api.doc("participants") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(login_model) + def get(self, study_id: int): + pass + + 
@api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(login_model) + + def post(self, study_id: int): + # user query using username + # call user check password + + pass + + diff --git a/apis/signup_user.py b/apis/signup_user.py new file mode 100644 index 00000000..52185094 --- /dev/null +++ b/apis/signup_user.py @@ -0,0 +1,46 @@ +from flask import Response, jsonify, request +from flask_restx import Namespace, Resource, fields + +from model import Participant, Study, db, User + +api = Namespace("participant", description="participant operations", path="/") + +signup_model = api.model( + "Signup", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "created_at": fields.String(required=True), + "updated_on": fields.String(required=True), + "address": fields.String(required=True), + "age": fields.String(required=True), + }, +) + + +@api.route("/signup") +class SignupUser(Resource): + @api.doc("signup_model ") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(signup_model ) + def get(self, study_id: int): + pass + + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(signup_model) + def post(self): + data = request.json + # TODO data[email doesnt exist then raise error; json validation library + user = User.query.filter_by(email_address=data["email_address"]).one_or_none() + if user: + return "This email address is already in use", 409 + user = User.query.filter_by(username=data["username"]).one_or_none() + if user: + return "This username is already in use", 409 + user = User.from_data(data) + db.session.add(user) + db.session.commit() + return user.to_dict(), 201 diff --git a/model/user.py b/model/user.py index 80751dc0..8267150b 100644 --- a/model/user.py +++ b/model/user.py @@ -1,18 +1,20 @@ import uuid from datetime import datetime from .db import db -import 
datetime from datetime import timezone +import datetime + class User(db.Model): - def __init__(self): + def __init__(self, password): self.id = str(uuid.uuid4()) - + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.set_password(password) __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) - email_address = db.Column(db.String, nullable=False) - username = db.Column(db.String, nullable=False) + email_address = db.Column(db.String, nullable=False, unique=True) + username = db.Column(db.String, nullable=False, unique=True) first_name = db.Column(db.String, nullable=False) last_name = db.Column(db.String, nullable=False) orcid = db.Column(db.String, nullable=False) @@ -29,19 +31,29 @@ def to_dict(self): "first_name": self.first_name, "last_name": self.last_name, "orcid": self.orcid, - "hash": self.hash, "created_at": self.created_at, "institution": self.institution, } @staticmethod def from_data(data: dict): - user = User() - user.email_address = data["email_address"] - user.username = data["username"] - user.first_name = data["first_name"] - user.last_name = data["last_name"] - user.orcid = data["orcid"] - user.hash = data["hash"] - user.institution = data["institution"] + user = User(data["password"]) + user.update(data) return user + + def update(self, data): + self.email_address = data["email_address"] + self.username = data["username"] + self.first_name = data["first_name"] + self.last_name = data["last_name"] + self.orcid = data["orcid"] + self.institution = data["institution"] + + def set_password(self, password): + pass + # hasliyib sonra zibil databasede saxla + + def check_password(self, password): + pass + # hasliyinb check ele + From abf9bdcdc8ccd76e0ac310290966266d462cdabd Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 18 Sep 2023 15:57:50 +0000 Subject: [PATCH 144/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/invited_contributor.py | 11 +++++++++-- apis/login.py | 2 -- model/__init__.py | 2 +- model/invited_study_contributor.py | 1 + model/study.py | 20 +++++++++++++++----- model/user.py | 3 +-- 6 files changed, 27 insertions(+), 12 deletions(-) diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 038a4fa6..1eff8c4e 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -1,6 +1,14 @@ from flask_restx import Namespace, Resource, fields -from model import StudyInvitedContributor, Study, db, User, StudyContributor, StudyException +from model import ( + StudyInvitedContributor, + Study, + db, + User, + StudyContributor, + StudyException, +) from flask import request + api = Namespace("invited_contributors", description="invited contributors", path="/") @@ -37,4 +45,3 @@ def post(self, study_id: int): return ex.args[0], 409 db.session.commit() return contributor_.to_dict(), 201 - diff --git a/apis/login.py b/apis/login.py index 7b8bf3c7..95c15521 100644 --- a/apis/login.py +++ b/apis/login.py @@ -37,5 +37,3 @@ def post(self, study_id: int): # call user check password pass - - diff --git a/model/__init__.py b/model/__init__.py index c2ff3834..da7b573b 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -109,5 +109,5 @@ "Identifiers", "Arm", "StudyInvitedContributor", - "StudyException" + "StudyException", ] diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index c4f79dfe..778565c5 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -12,6 +12,7 @@ def __init__(self, study, email_address, permission): self.permission = permission self.invited_on = datetime.datetime.now(timezone.utc).timestamp() self.email_address = email_address + __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) diff 
--git a/model/study.py b/model/study.py index e2411355..715e9686 100644 --- a/model/study.py +++ b/model/study.py @@ -34,7 +34,9 @@ def __init__(self): updated_on = db.Column(db.BigInteger, nullable=False) dataset = db.relationship("Dataset", back_populates="study") - study_contributors = db.relationship("StudyContributor", back_populates="study", lazy="dynamic") + study_contributors = db.relationship( + "StudyContributor", back_populates="study", lazy="dynamic" + ) participants = db.relationship("Participant", back_populates="study") invited_contributors = db.relationship( "StudyInvitedContributor", back_populates="study", lazy="dynamic" @@ -111,7 +113,9 @@ def touch(self): self.updated_on = datetime.datetime.now(timezone.utc).timestamp() def add_user_to_study(self, user, permission): - contributor = self.study_contributors.filter(model.StudyContributor.user_id == user.id) + contributor = self.study_contributors.filter( + model.StudyContributor.user_id == user.id + ) if contributor: raise StudyException("User is already exists in study") else: @@ -120,10 +124,16 @@ def add_user_to_study(self, user, permission): return contributor def invite_user_to_study(self, email_address, permission): - invited_contributor = self.invited_contributors.filter(model.StudyInvitedContributor.email_address == email_address).one_or_none() + invited_contributor = self.invited_contributors.filter( + model.StudyInvitedContributor.email_address == email_address + ).one_or_none() if invited_contributor: - raise StudyException("This email address has already been invited to this study") + raise StudyException( + "This email address has already been invited to this study" + ) else: - contributor_add = model.StudyInvitedContributor(self, email_address, permission) + contributor_add = model.StudyInvitedContributor( + self, email_address, permission + ) db.session.add(contributor_add) return contributor_add diff --git a/model/user.py b/model/user.py index 8267150b..c252a465 100644 --- 
a/model/user.py +++ b/model/user.py @@ -5,12 +5,12 @@ import datetime - class User(db.Model): def __init__(self, password): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() self.set_password(password) + __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) @@ -56,4 +56,3 @@ def set_password(self, password): def check_password(self, password): pass # hasliyinb check ele - From 9a520c176339d186f947385ca9ff9f81b89d366d Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 18 Sep 2023 15:44:56 -0700 Subject: [PATCH 145/505] feat: set /auth/signup POST endpoint --- apis/__init__.py | 7 ++++++- apis/login.py | 25 ++++++++++++++----------- app.py | 7 ++++--- model/user.py | 20 +++++++++++--------- 4 files changed, 35 insertions(+), 24 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 5ae384ac..11bb2721 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -11,6 +11,9 @@ from .study import api as study_api from .invited_contributor import api as invited_contributors +from .signup_user import api as signup +from .login import api as login + from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd from .study_metadata.study_contact import api as contact @@ -63,7 +66,8 @@ api.add_namespace(dataset_metadata_namespace) api.add_namespace(study_metadata_namespace) - +api.add_namespace(signup) +api.add_namespace(login) @api.route("/echo", endpoint="echo") class HelloWorld(Resource): @@ -80,3 +84,4 @@ def get(self): api.add_namespace(participants_api) api.add_namespace(contributors_api) api.add_namespace(invited_contributors) + diff --git a/apis/login.py b/apis/login.py index 7b8bf3c7..a191c2fb 100644 --- a/apis/login.py +++ b/apis/login.py @@ -1,9 +1,8 @@ from flask import Response, jsonify, request from flask_restx import Namespace, Resource, fields - -from model import 
Participant, Study, db - -api = Namespace("participant", description="participant operations", path="/") +from model import User +from flask import redirect, url_for +api = Namespace("login", description="login", path="/") login_model = api.model( "Login", @@ -19,9 +18,9 @@ ) -@api.route("/login") +@api.route("/auth/login") class Login(Resource): - @api.doc("participants") + @api.doc("login") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(login_model) @@ -31,11 +30,15 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(login_model) + def post(self): + data = request.json + username = data["username"] + user = User.query.filter_by(username=username).one_or_none() + validate_pass = user.check_password(data["password"]) + if user and validate_pass: + return redirect(url_for("study")) + else: + return "Username or password is not correct", 403 - def post(self, study_id: int): - # user query using username - # call user check password - - pass diff --git a/app.py b/app.py index 09d67423..5e453ffc 100644 --- a/app.py +++ b/app.py @@ -5,16 +5,16 @@ import model from apis import api +from flask_bcrypt import Bcrypt # from pyfairdatatools import __version__ - +bcrypt = Bcrypt() def create_app(): """Initialize the core application.""" # create and configure the app app = Flask(__name__) - # `full` if you want to see all the details app.config["SWAGGER_UI_DOC_EXPANSION"] = "list" app.config["RESTX_MASK_SWAGGER"] = False @@ -42,7 +42,7 @@ def create_app(): model.db.init_app(app) api.init_app(app) - + bcrypt.init_app(app) CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) # # @app.cli.command("create-schema") @@ -88,4 +88,5 @@ def destroy_schema(): port = args.port app = create_app() + app.run(host="0.0.0.0", port=port) diff --git a/model/user.py b/model/user.py index 8267150b..c8017678 100644 --- a/model/user.py +++ b/model/user.py @@ -3,14 +3,14 
@@ from .db import db from datetime import timezone import datetime - +import app class User(db.Model): - def __init__(self, password): + def __init__(self, password, data): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() - self.set_password(password) + self.set_password(password, data) __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) @@ -37,7 +37,7 @@ def to_dict(self): @staticmethod def from_data(data: dict): - user = User(data["password"]) + user = User(data["password"], data) user.update(data) return user @@ -49,11 +49,13 @@ def update(self, data): self.orcid = data["orcid"] self.institution = data["institution"] - def set_password(self, password): - pass - # hasliyib sonra zibil databasede saxla + def set_password(self, password, data): + hashed_password = app.bcrypt.generate_password_hash(password).decode('utf-8') + self.hash = hashed_password + def check_password(self, password): - pass - # hasliyinb check ele + hashed_password = app.bcrypt.generate_password_hash(password).decode('utf-8') + is_valid = app.bcrypt.check_password_hash(hashed_password, password) + return f"Password: {password}
Hashed Password: {hashed_password}
Is Valid: {is_valid}" From 96ce0ff3b520b4ef4553ef0857865f6dc498833b Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 18 Sep 2023 22:46:44 +0000 Subject: [PATCH 146/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 2 +- apis/login.py | 4 +--- app.py | 2 ++ model/user.py | 7 +++---- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 11bb2721..52e84cd1 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -69,6 +69,7 @@ api.add_namespace(signup) api.add_namespace(login) + @api.route("/echo", endpoint="echo") class HelloWorld(Resource): @api.response(200, "Success") @@ -84,4 +85,3 @@ def get(self): api.add_namespace(participants_api) api.add_namespace(contributors_api) api.add_namespace(invited_contributors) - diff --git a/apis/login.py b/apis/login.py index a191c2fb..73dd8651 100644 --- a/apis/login.py +++ b/apis/login.py @@ -2,6 +2,7 @@ from flask_restx import Namespace, Resource, fields from model import User from flask import redirect, url_for + api = Namespace("login", description="login", path="/") login_model = api.model( @@ -39,6 +40,3 @@ def post(self): return redirect(url_for("study")) else: return "Username or password is not correct", 403 - - - diff --git a/app.py b/app.py index 5e453ffc..461b305e 100644 --- a/app.py +++ b/app.py @@ -11,6 +11,8 @@ # from pyfairdatatools import __version__ bcrypt = Bcrypt() + + def create_app(): """Initialize the core application.""" # create and configure the app diff --git a/model/user.py b/model/user.py index c8017678..8a32d926 100644 --- a/model/user.py +++ b/model/user.py @@ -11,6 +11,7 @@ def __init__(self, password, data): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() self.set_password(password, data) + __tablename__ = "user" id = db.Column(db.CHAR(36), 
primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) @@ -50,12 +51,10 @@ def update(self, data): self.institution = data["institution"] def set_password(self, password, data): - hashed_password = app.bcrypt.generate_password_hash(password).decode('utf-8') + hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") self.hash = hashed_password - def check_password(self, password): - hashed_password = app.bcrypt.generate_password_hash(password).decode('utf-8') + hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") is_valid = app.bcrypt.check_password_hash(hashed_password, password) return f"Password: {password}
Hashed Password: {hashed_password}
Is Valid: {is_valid}" - From ff5a38f4a2d7782c724382fcdcc80ced57fb4600 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 18 Sep 2023 15:50:40 -0700 Subject: [PATCH 147/505] feat: set /auth/signup POST endpoint --- apis/signup_user.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/apis/signup_user.py b/apis/signup_user.py index 52185094..061b99b5 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -1,30 +1,32 @@ -from flask import Response, jsonify, request +from flask import request from flask_restx import Namespace, Resource, fields -from model import Participant, Study, db, User +from model import db, User -api = Namespace("participant", description="participant operations", path="/") +api = Namespace("signup", description="signup user", path="/") signup_model = api.model( "Signup", { "id": fields.String(required=True), + "email_address": fields.String(required=True), + "username": fields.String(required=True), "first_name": fields.String(required=True), "last_name": fields.String(required=True), - "created_at": fields.String(required=True), - "updated_on": fields.String(required=True), - "address": fields.String(required=True), - "age": fields.String(required=True), + "orcid": fields.String(required=True), + "hash": fields.String(required=True), + "created_at": fields.Integer(required=True), + "institution": fields.String(required=True) }, ) -@api.route("/signup") +@api.route("/auth/signup") class SignupUser(Resource): @api.doc("signup_model ") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(signup_model ) + # @api.marshal_with(signup_model) def get(self, study_id: int): pass @@ -34,6 +36,8 @@ def get(self, study_id: int): def post(self): data = request.json # TODO data[email doesnt exist then raise error; json validation library + if not data["email_address"]: + raise "Email is not found" user = User.query.filter_by(email_address=data["email_address"]).one_or_none() 
if user: return "This email address is already in use", 409 From 4d0a6b172928de2514893343e0d87e333a279d2c Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 18 Sep 2023 22:51:31 +0000 Subject: [PATCH 148/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/signup_user.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/signup_user.py b/apis/signup_user.py index 061b99b5..57d1055c 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -16,7 +16,7 @@ "orcid": fields.String(required=True), "hash": fields.String(required=True), "created_at": fields.Integer(required=True), - "institution": fields.String(required=True) + "institution": fields.String(required=True), }, ) From 136ad4bf52cb5def09b89883a6d5af06d1a72308 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 19 Sep 2023 08:58:01 -0700 Subject: [PATCH 149/505] fix: flask-bcrypt updated in poetry --- poetry.lock | 240 ++++++++++++++----------------------------------- pyproject.toml | 1 + 2 files changed, 68 insertions(+), 173 deletions(-) diff --git a/poetry.lock b/poetry.lock index c02a1de2..4587a113 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aniso8601" version = "9.0.1" description = "A library for parsing ISO 8601 strings." 
-category = "main" optional = false python-versions = "*" files = [ @@ -19,7 +18,6 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -41,7 +39,6 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -53,7 +50,6 @@ files = [ name = "argon2-cffi" version = "21.3.0" description = "The secure Argon2 password hashing algorithm." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -73,7 +69,6 @@ tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -111,7 +106,6 @@ tests = ["pytest"] name = "arrow" version = "1.2.3" description = "Better dates & times for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -126,7 +120,6 @@ python-dateutil = ">=2.7.0" name = "art" version = "6.0" description = "ASCII Art Library For Python" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -141,7 +134,6 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture name = "astroid" version = "2.15.6" description = "An abstract syntax tree for Python with inference support." 
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -161,7 +153,6 @@ wrapt = [ name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -179,7 +170,6 @@ test = ["astroid", "pytest"] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -194,7 +184,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -213,7 +202,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -228,7 +216,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -236,11 +223,44 @@ files = [ {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +[[package]] +name = "bcrypt" +version = "4.0.1" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, + {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, + {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = 
"sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, + {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + [[package]] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -259,7 +279,6 @@ lxml = ["lxml"] name = "black" version = "23.7.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -306,7 +325,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -325,7 +343,6 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "blinker" version = "1.6.2" description = "Fast, simple object-to-object and broadcast signaling" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -337,7 +354,6 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -349,7 +365,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "dev" optional = false python-versions = "*" files = [ @@ -426,7 +441,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -511,7 +525,6 @@ files = [ name = "click" version = "8.1.6" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -526,7 +539,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -538,7 +550,6 @@ files = [ name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -558,7 +569,6 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -634,7 +644,6 @@ toml = ["tomli"] name = "coveragespace" version = "6.0.2" description = "A place to track your code coverage metrics." 
-category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -653,7 +662,6 @@ requests = ">=2.28,<3.0" name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -681,7 +689,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -693,7 +700,6 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -705,7 +711,6 @@ files = [ name = "dicttoxml" version = "1.7.16" description = "Converts a Python dictionary or other native data type into a valid XML string." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -717,7 +722,6 @@ files = [ name = "dill" version = "0.3.7" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -732,7 +736,6 @@ graph = ["objgraph (>=1.7.2)"] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" -category = "dev" optional = false python-versions = "*" files = [ @@ -743,7 +746,6 @@ files = [ name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -758,7 +760,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -773,7 +774,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "faker" version = "18.13.0" description = "Faker is a Python package that generates fake data for you." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -788,7 +788,6 @@ python-dateutil = ">=2.4" name = "fastjsonschema" version = "2.18.0" description = "Fastest Python implementation of JSON schema" -category = "dev" optional = false python-versions = "*" files = [ @@ -803,7 +802,6 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -820,7 +818,6 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.2" description = "A simple framework for building complex web applications." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -840,11 +837,25 @@ Werkzeug = ">=2.3.3" async = ["asgiref (>=3.2)"] dotenv = ["python-dotenv"] +[[package]] +name = "flask-bcrypt" +version = "1.0.1" +description = "Brcrypt hashing for Flask." +optional = false +python-versions = "*" +files = [ + {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, + {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, +] + +[package.dependencies] +bcrypt = ">=3.1.1" +Flask = "*" + [[package]] name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" -category = "main" optional = false python-versions = "*" files = [ @@ -859,7 +870,6 @@ Flask = ">=0.9" name = "flask-restx" version = "1.1.0" description = "Fully featured framework for fast, easy and documented API development with Flask" -category = "main" optional = false python-versions = "*" files = [ @@ -883,7 +893,6 @@ test = ["Faker (==2.0.0)", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pyt name = "flask-sqlalchemy" version = "3.0.5" description = "Add SQLAlchemy support to your Flask 
application." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -899,7 +908,6 @@ sqlalchemy = ">=1.4.18" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -911,7 +919,6 @@ files = [ name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -926,7 +933,6 @@ python-dateutil = ">=2.7" name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." -category = "dev" optional = false python-versions = "*" files = [ @@ -944,7 +950,6 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1018,7 +1023,6 @@ test = ["objgraph", "psutil"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1030,7 +1034,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1050,7 +1053,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.0.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1069,7 +1071,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = 
false python-versions = ">=3.7" files = [ @@ -1081,7 +1082,6 @@ files = [ name = "ipykernel" version = "6.25.0" description = "IPython Kernel for Jupyter" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1095,7 +1095,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1115,7 +1115,6 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.12.2" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1155,7 +1154,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" -category = "dev" optional = false python-versions = "*" files = [ @@ -1167,7 +1165,6 @@ files = [ name = "ipywidgets" version = "8.1.0" description = "Jupyter interactive widgets" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1189,7 +1186,6 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1204,7 +1200,6 @@ arrow = ">=0.15.0" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1222,7 +1217,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1234,7 +1228,6 @@ files = [ name = "jedi" version = "0.19.0" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1254,7 +1247,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1272,7 +1264,6 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." -category = "dev" optional = false python-versions = "*" files = [ @@ -1287,7 +1278,6 @@ dev = ["hypothesis"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1299,7 +1289,6 @@ files = [ name = "jsonschema" version = "4.18.4" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1331,7 +1320,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1347,7 +1335,6 @@ referencing = ">=0.28.0" name = "jupyter" version = "1.0.0" description = "Jupyter metapackage. Install all the Jupyter components in one go." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -1368,7 +1355,6 @@ qtconsole = "*" name = "jupyter-client" version = "8.3.0" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1378,7 +1364,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1392,7 +1378,6 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-console" version = "6.6.3" description = "Jupyter terminal console" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1404,7 +1389,7 @@ files = [ ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" prompt-toolkit = ">=3.0.30" pygments = "*" pyzmq = ">=17" @@ -1417,7 +1402,6 @@ test = ["flaky", "pexpect", "pytest"] name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1438,7 +1422,6 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "jupyter-events" version = "0.7.0" description = "Jupyter Event System library" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1464,7 +1447,6 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p name = "jupyter-lsp" version = "2.2.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1480,7 +1462,6 @@ jupyter-server = ">=1.1.2" name = "jupyter-server" version = "2.7.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1493,7 +1474,7 @@ anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" jupyter-events = ">=0.6.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1517,7 +1498,6 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-terminals" version = "0.4.4" description = "A Jupyter Server Extension Providing Terminals." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1537,7 +1517,6 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", name = "jupyterlab" version = "4.0.3" description = "JupyterLab computational environment" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1571,7 +1550,6 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1583,7 +1561,6 @@ files = [ name = "jupyterlab-server" version = "2.24.0" description = "A set of server components for JupyterLab and JupyterLab like applications." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1610,7 +1587,6 @@ test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-valida name = "jupyterlab-widgets" version = "3.0.8" description = "Jupyter interactive widgets for JupyterLab" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1622,7 +1598,6 @@ files = [ name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1668,7 +1643,6 @@ files = [ name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1686,7 +1660,6 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1746,7 +1719,6 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1761,7 +1733,6 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1773,7 +1744,6 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1785,7 +1755,6 @@ files = [ name = "minilog" version = "2.2" description = "Minimalistic wrapper for Python logging." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1797,7 +1766,6 @@ files = [ name = "mistune" version = "3.0.1" description = "A sane and fast Markdown parser with useful plugins and renderers" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1809,7 +1777,6 @@ files = [ name = "mkdocs" version = "1.3.1" description = "Project documentation with Markdown." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1836,7 +1803,6 @@ i18n = ["babel (>=2.9.0)"] name = "mypy" version = "1.4.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1883,7 +1849,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1895,7 +1860,6 @@ files = [ name = "nbclient" version = "0.8.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
-category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1905,7 +1869,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -1918,7 +1882,6 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= name = "nbconvert" version = "7.7.3" description = "Converting Jupyter Notebooks" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1957,7 +1920,6 @@ webpdf = ["playwright"] name = "nbformat" version = "5.9.2" description = "The Jupyter Notebook format" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1979,7 +1941,6 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.7" description = "Patch asyncio to allow nested event loops" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1991,7 +1952,6 @@ files = [ name = "notebook" version = "7.0.1" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2016,7 +1976,6 @@ test = ["ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[tes name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2034,7 +1993,6 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2046,7 +2004,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2058,7 +2015,6 @@ files = [ name = "pandocfilters" version = "1.5.0" description = "Utilities for writing pandoc filters in python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2070,7 +2026,6 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2086,7 +2041,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2098,7 +2052,6 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2110,7 +2063,6 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" optional = false python-versions = "*" files = [ @@ -2125,7 +2077,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -2137,7 +2088,6 @@ files = [ name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2149,7 +2099,6 @@ files = [ name = "platformdirs" version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2165,7 +2114,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2181,7 +2129,6 @@ testing = ["pytest", "pytest-benchmark"] name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2200,7 +2147,6 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2215,7 +2161,6 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2230,7 +2175,6 @@ wcwidth = "*" name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2257,7 +2201,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2280,7 +2223,6 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -2292,7 +2234,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -2307,7 +2248,6 @@ tests = ["pytest"] name = "pycodestyle" version = "2.11.0" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2319,7 +2259,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2331,7 +2270,6 @@ files = [ name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2349,7 +2287,6 @@ toml = ["tomli (>=1.2.3)"] name = "pyfairdatatools" version = "0.1.3" description = "Tools for AI-READI" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2372,7 +2309,6 @@ validators = ">=0.20.0,<0.21.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2384,7 +2320,6 @@ files = [ name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2399,7 +2334,6 @@ plugins = ["importlib-metadata"] name = "pylint" version = "2.17.5" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2429,7 +2363,6 @@ testutils = ["gitpython (>3)"] name = "pymdown-extensions" version = "10.1" description = "Extension pack for Python Markdown." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2445,7 +2378,6 @@ pyyaml = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2468,7 +2400,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2487,7 +2418,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-describe" version = "2.1.0" description = "Describe-style plugin for pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2502,7 +2432,6 @@ pytest = ">=4.6,<8" name = "pytest-expecter" version = "3.0" description = "Better testing with expecter and pytest." 
-category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2514,7 +2443,6 @@ files = [ name = "pytest-random" version = "0.02" description = "py.test plugin to randomize tests" -category = "dev" optional = false python-versions = "*" files = [ @@ -2528,7 +2456,6 @@ pytest = ">=2.2.3" name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2543,7 +2470,6 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2558,7 +2484,6 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2570,7 +2495,6 @@ files = [ name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2582,7 +2506,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "dev" optional = false python-versions = "*" files = [ @@ -2606,7 +2529,6 @@ files = [ name = "pywinpty" version = "2.0.11" description = "Pseudo terminal support for Windows from Python." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2621,7 +2543,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2630,6 +2551,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2637,8 +2559,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2655,6 +2584,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2662,6 +2592,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2671,7 +2602,6 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. 
" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2686,7 +2616,6 @@ pyyaml = "*" name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2776,7 +2705,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qtconsole" version = "5.4.3" description = "Jupyter Qt console" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2803,7 +2731,6 @@ test = ["flaky", "pytest", "pytest-qt"] name = "qtpy" version = "2.3.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2821,7 +2748,6 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] name = "referencing" version = "0.30.0" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2837,7 +2763,6 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2859,7 +2784,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2874,7 +2798,6 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2886,7 +2809,6 @@ files = [ name = "rpds-py" version = "0.9.2" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2993,7 +2915,6 @@ files = [ name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3010,7 +2931,6 @@ win32 = ["pywin32"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3022,7 +2942,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3034,7 +2953,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -3046,7 +2964,6 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3058,7 +2975,6 @@ files = [ name = "sqlalchemy" version = "2.0.19" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3106,7 +3022,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} typing-extensions = ">=4.2.0" [package.extras] @@ -3137,7 +3053,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -3157,7 +3072,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "terminado" version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3178,7 +3092,6 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3197,7 +3110,6 @@ test = ["flake8", "isort", "pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3209,7 +3121,6 @@ files = [ name = "tomlkit" version = "0.12.1" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3221,7 +3132,6 @@ files = [ name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -3242,7 +3152,6 @@ files = [ name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3258,7 +3167,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-requests" version = "2.31.0.2" description = "Typing stubs for requests" -category = "main" optional = false python-versions = "*" files = [ @@ -3273,7 +3181,6 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" -category = "main" optional = false python-versions = "*" files = [ @@ -3285,7 +3192,6 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3297,7 +3203,6 @@ files = [ name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3312,7 
+3217,6 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3329,7 +3233,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3346,7 +3249,6 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3386,7 +3288,6 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -3398,7 +3299,6 @@ files = [ name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3414,7 +3314,6 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "dev" optional = false python-versions = "*" files = [ @@ -3426,7 +3325,6 @@ files = [ name = "websocket-client" version = "1.6.1" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3443,7 +3341,6 @@ test = ["websockets"] name = "werkzeug" version = "2.3.6" description = "The comprehensive WSGI web application library." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3461,7 +3358,6 @@ watchdog = ["watchdog (>=2.3)"] name = "widgetsnbextension" version = "4.0.8" description = "Jupyter interactive widgets for Jupyter Notebook" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3473,7 +3369,6 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3558,7 +3453,6 @@ files = [ name = "zipp" version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3573,4 +3467,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "66fe72ac723e345f960f9cbb35f9327a70397a17a0fbd64ce4d6c19899a7b3d7" +content-hash = "bd2569c8d76460d2bffa7775b672bf90748bb0e606c9be8a31edf1337eac1b9d" diff --git a/pyproject.toml b/pyproject.toml index 61f3d287..a61e6591 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ faker = "^18.11.2" pyfairdatatools = "0.1.3" psycopg2 = "^2.9.6" python-dotenv = "^1.0.0" +flask-bcrypt = "^1.0.1" [tool.poetry.group.dev.dependencies] From 781482172eff8172c6e712e385058157dbdf1c99 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 19 Sep 2023 12:38:02 -0700 Subject: [PATCH 150/505] fix: set POST for auth/login endpoint --- apis/login.py | 20 ++++++++++++-------- apis/signup_user.py | 2 +- model/user.py | 2 +- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/apis/login.py b/apis/login.py index 73dd8651..9d1b6c44 100644 --- a/apis/login.py +++ b/apis/login.py @@ -9,12 +9,14 @@ "Login", { "id": fields.String(required=True), + "email_address": fields.String(required=True), + "username": fields.String(required=True), 
"first_name": fields.String(required=True), "last_name": fields.String(required=True), - "created_at": fields.String(required=True), - "updated_on": fields.String(required=True), - "address": fields.String(required=True), - "age": fields.String(required=True), + "orcid": fields.String(required=True), + "hash": fields.String(required=True), + "created_at": fields.Integer(required=True), + "institution": fields.String(required=True) }, ) @@ -25,7 +27,7 @@ class Login(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(login_model) - def get(self, study_id: int): + def get(self): pass @api.response(200, "Success") @@ -35,8 +37,10 @@ def post(self): data = request.json username = data["username"] user = User.query.filter_by(username=username).one_or_none() + if not user: + return "Username is not correct", 403 validate_pass = user.check_password(data["password"]) - if user and validate_pass: - return redirect(url_for("study")) + if not validate_pass: + return 'Password is not correct', 401 else: - return "Username or password is not correct", 403 + return 'Authentication is successful' diff --git a/apis/signup_user.py b/apis/signup_user.py index 061b99b5..719fbd50 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -27,7 +27,7 @@ class SignupUser(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(signup_model) - def get(self, study_id: int): + def get(self): pass @api.response(200, "Success") diff --git a/model/user.py b/model/user.py index 8a32d926..cd42e140 100644 --- a/model/user.py +++ b/model/user.py @@ -57,4 +57,4 @@ def set_password(self, password, data): def check_password(self, password): hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") is_valid = app.bcrypt.check_password_hash(hashed_password, password) - return f"Password: {password}
Hashed Password: {hashed_password}
Is Valid: {is_valid}" + return is_valid From b68a63bb5e0eb90adc1dc7f2b69c955edd7a2cca Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 19 Sep 2023 19:38:48 +0000 Subject: [PATCH 151/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/login.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apis/login.py b/apis/login.py index 9d1b6c44..fd808148 100644 --- a/apis/login.py +++ b/apis/login.py @@ -16,7 +16,7 @@ "orcid": fields.String(required=True), "hash": fields.String(required=True), "created_at": fields.Integer(required=True), - "institution": fields.String(required=True) + "institution": fields.String(required=True), }, ) @@ -41,6 +41,6 @@ def post(self): return "Username is not correct", 403 validate_pass = user.check_password(data["password"]) if not validate_pass: - return 'Password is not correct', 401 + return "Password is not correct", 401 else: - return 'Authentication is successful' + return "Authentication is successful" From b4875a32f3c9c984ed2e9cb6f21fd7e68fed1b55 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 19 Sep 2023 13:03:17 -0700 Subject: [PATCH 152/505] fix: username is commented out from login and signup --- apis/login.py | 6 +++--- apis/signup_user.py | 6 +++--- model/user.py | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/apis/login.py b/apis/login.py index 9d1b6c44..bab7a3b8 100644 --- a/apis/login.py +++ b/apis/login.py @@ -35,10 +35,10 @@ def get(self): # @api.marshal_with(login_model) def post(self): data = request.json - username = data["username"] - user = User.query.filter_by(username=username).one_or_none() + email_address = data["email_address"] + user = User.query.filter_by(email_address=email_address).one_or_none() if not user: - return "Username is not correct", 403 + return "Email is not correct", 403 validate_pass = 
user.check_password(data["password"]) if not validate_pass: return 'Password is not correct', 401 diff --git a/apis/signup_user.py b/apis/signup_user.py index 27213a27..93a1a414 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -41,9 +41,9 @@ def post(self): user = User.query.filter_by(email_address=data["email_address"]).one_or_none() if user: return "This email address is already in use", 409 - user = User.query.filter_by(username=data["username"]).one_or_none() - if user: - return "This username is already in use", 409 + # user = User.query.filter_by(username=data["username"]).one_or_none() + # if user: + # return "This username is already in use", 409 user = User.from_data(data) db.session.add(user) db.session.commit() diff --git a/model/user.py b/model/user.py index cd42e140..168c9411 100644 --- a/model/user.py +++ b/model/user.py @@ -28,7 +28,7 @@ def to_dict(self): return { "id": self.id, "email_address": self.email_address, - "username": self.username, + "username": self.email_address, "first_name": self.first_name, "last_name": self.last_name, "orcid": self.orcid, @@ -44,7 +44,7 @@ def from_data(data: dict): def update(self, data): self.email_address = data["email_address"] - self.username = data["username"] + self.username = data["email_address"] self.first_name = data["first_name"] self.last_name = data["last_name"] self.orcid = data["orcid"] From aedc21072b6be058e63b7834e41a76c5c0b6a3f3 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 19 Sep 2023 16:03:20 -0700 Subject: [PATCH 153/505] fix: syntax in api endpoint --- apis/contributor.py | 13 +++++++++---- apis/dataset.py | 2 +- apis/dataset_metadata_namespace.py | 2 +- apis/invited_contributor.py | 2 +- apis/login.py | 21 +++------------------ apis/participant.py | 2 +- apis/study.py | 2 +- model/study_contributor.py | 2 +- model/user.py | 1 + 9 files changed, 19 insertions(+), 28 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index ee1128e2..b80d671c 100644 --- 
a/apis/contributor.py +++ b/apis/contributor.py @@ -1,8 +1,8 @@ from flask_restx import Namespace, Resource, fields -from model import StudyContributor +from model import StudyContributor, Study, db -api = Namespace("contributor", description="contributors", path="/") +api = Namespace("Contributor", description="Contributors", path="/") contributors_model = api.model( @@ -20,10 +20,15 @@ class AddParticipant(Resource): @api.doc("contributor list") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(contributors_model) + # @api.marshal_with(contributors_model) def get(self, study_id: int): contributors = StudyContributor.query.all() return [c.to_dict() for c in contributors] - def post(self, study_id: int): + def put(self, study_id: int): contributors = StudyContributor.query.all() + + def delete(self, study_id: int): + study = Study.query.get(study_id) + contributors = Study.query.filter_by(contributors=study.study_contributors.user_id) + db.session.delete(contributors) diff --git a/apis/dataset.py b/apis/dataset.py index 0db1e2bf..d0270bbb 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -3,7 +3,7 @@ from model import Dataset, Version, Participant, Study, db -api = Namespace("dataset", description="dataset operations", path="/") +api = Namespace("Dataset", description="Dataset operations", path="/") dataset_versions_model = api.model( diff --git a/apis/dataset_metadata_namespace.py b/apis/dataset_metadata_namespace.py index 57aad026..05837562 100644 --- a/apis/dataset_metadata_namespace.py +++ b/apis/dataset_metadata_namespace.py @@ -1,4 +1,4 @@ from flask_restx import Namespace -api = Namespace("Dataset Metadata", description="dataset operations", path="/") +api = Namespace("Dataset Metadata", description="Dataset operations", path="/") diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 1eff8c4e..38a94d3f 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -9,7 +9,7 @@ ) 
from flask import request -api = Namespace("invited_contributors", description="invited contributors", path="/") +api = Namespace("Invited_contributors", description="Invited contributors", path="/") contributors_model = api.model( diff --git a/apis/login.py b/apis/login.py index f73d0ea5..812ad627 100644 --- a/apis/login.py +++ b/apis/login.py @@ -3,36 +3,21 @@ from model import User from flask import redirect, url_for -api = Namespace("login", description="login", path="/") +api = Namespace("Login", description="Login", path="/") login_model = api.model( "Login", { - "id": fields.String(required=True), "email_address": fields.String(required=True), - "username": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "orcid": fields.String(required=True), - "hash": fields.String(required=True), - "created_at": fields.Integer(required=True), - "institution": fields.String(required=True), - }, + } ) @api.route("/auth/login") class Login(Resource): - @api.doc("login") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(login_model) - def get(self): - pass - - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(login_model) + @api.marshal_with(login_model) def post(self): data = request.json email_address = data["email_address"] diff --git a/apis/participant.py b/apis/participant.py index 74327f32..97e7e259 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -3,7 +3,7 @@ from model import Participant, Study, db -api = Namespace("participant", description="participant operations", path="/") +api = Namespace("Participant", description="Participant operations", path="/") participant_model = api.model( "Study", diff --git a/apis/study.py b/apis/study.py index 61803787..d7bfd9b2 100644 --- a/apis/study.py +++ b/apis/study.py @@ -3,7 +3,7 @@ from model import Study, db -api = Namespace("study", description="study 
operations", path="/") +api = Namespace("Study", description="Study operations", path="/") owner = api.model( "Owner", diff --git a/model/study_contributor.py b/model/study_contributor.py index 3307e6c9..5aa6a914 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -25,5 +25,5 @@ def to_dict(self): return { "permission": self.permission, "user_id": self.user_id, - "study_id": self.study_id, + "study_id": self.study_id } diff --git a/model/user.py b/model/user.py index 168c9411..b4b0eb56 100644 --- a/model/user.py +++ b/model/user.py @@ -55,6 +55,7 @@ def set_password(self, password, data): self.hash = hashed_password def check_password(self, password): + # TODO check password lenght and having uppercase letter hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") is_valid = app.bcrypt.check_password_hash(hashed_password, password) return is_valid From d2e019ac12809127b2830092faa7049bf702ad0c Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 19 Sep 2023 16:03:36 -0700 Subject: [PATCH 154/505] feat: download JWT --- apis/signup_user.py | 16 +++++----------- poetry.lock | 19 ++++++++++++++++++- pyproject.toml | 1 + 3 files changed, 24 insertions(+), 12 deletions(-) diff --git a/apis/signup_user.py b/apis/signup_user.py index 93a1a414..a709d06a 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -2,8 +2,10 @@ from flask_restx import Namespace, Resource, fields from model import db, User - -api = Namespace("signup", description="signup user", path="/") +import jwt +encoded = jwt.encode({"some": "payload"}, "secret", algorithm="HS256") +decoded = jwt.decode(encoded, "secret", algorithms=["HS256"]) +api = Namespace("Signup", description="Signup user", path="/") signup_model = api.model( "Signup", @@ -14,7 +16,6 @@ "first_name": fields.String(required=True), "last_name": fields.String(required=True), "orcid": fields.String(required=True), - "hash": fields.String(required=True), "created_at": 
fields.Integer(required=True), "institution": fields.String(required=True), }, @@ -23,16 +24,9 @@ @api.route("/auth/signup") class SignupUser(Resource): - @api.doc("signup_model ") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(signup_model) - def get(self): - pass - @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(signup_model) + @api.marshal_with(signup_model) def post(self): data = request.json # TODO data[email doesnt exist then raise error; json validation library diff --git a/poetry.lock b/poetry.lock index 4587a113..273af104 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2330,6 +2330,23 @@ files = [ [package.extras] plugins = ["importlib-metadata"] +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pylint" version = "2.17.5" @@ -3467,4 +3484,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "bd2569c8d76460d2bffa7775b672bf90748bb0e606c9be8a31edf1337eac1b9d" +content-hash = "fffa54b318eb08630f9bfb6591a6b53322192e784fdddd79bd87025d7186fb0b" diff --git a/pyproject.toml b/pyproject.toml index a61e6591..0195d31f 100644 --- a/pyproject.toml 
+++ b/pyproject.toml @@ -51,6 +51,7 @@ pyfairdatatools = "0.1.3" psycopg2 = "^2.9.6" python-dotenv = "^1.0.0" flask-bcrypt = "^1.0.1" +pyjwt = "^2.8.0" [tool.poetry.group.dev.dependencies] From dc28016f4bae36833fde20deb3d7ac872b5aabfa Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 20 Sep 2023 12:15:53 -0700 Subject: [PATCH 155/505] wip: set JWTin login --- apis/contributor.py | 14 +++++++++++--- apis/login.py | 16 ++++++++++++---- apis/signup_user.py | 4 +--- config.py | 1 + 4 files changed, 25 insertions(+), 10 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index b80d671c..9d00ea79 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -16,7 +16,7 @@ @api.route("/study//contributor") -class AddParticipant(Resource): +class AddContributor(Resource): @api.doc("contributor list") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -28,7 +28,15 @@ def get(self, study_id: int): def put(self, study_id: int): contributors = StudyContributor.query.all() - def delete(self, study_id: int): + +@api.route("/study//contributor/") +class DeleteContributor(Resource): + @api.doc("contributor delete") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, user_id: int): study = Study.query.get(study_id) - contributors = Study.query.filter_by(contributors=study.study_contributors.user_id) + contributors = study.study_contributor db.session.delete(contributors) + db.session.commit() + return 204 diff --git a/apis/login.py b/apis/login.py index 812ad627..de63a857 100644 --- a/apis/login.py +++ b/apis/login.py @@ -1,8 +1,9 @@ -from flask import Response, jsonify, request +from flask import Response, jsonify, request, make_response from flask_restx import Namespace, Resource, fields from model import User -from flask import redirect, url_for +import jwt +import config api = Namespace("Login", description="Login", path="/") login_model = api.model( @@ -17,7 +18,7 @@ class 
Login(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(login_model) + # @api.marshal_with(login_model) def post(self): data = request.json email_address = data["email_address"] @@ -28,4 +29,11 @@ def post(self): if not validate_pass: return "Password is not correct", 401 else: - return "Authentication is successful" + if len(config.secret) < 14: + raise "secret key should contain at least 14 characters" + encoded_jwt_code = jwt.encode({"user": user.id}, config.secret, algorithm="HS256") + resp = make_response('Setting the cookie') + resp.set_cookie('test', encoded_jwt_code) + return f"Authentication is successful, {resp}" + + diff --git a/apis/signup_user.py b/apis/signup_user.py index a709d06a..c55c0d1f 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -2,9 +2,7 @@ from flask_restx import Namespace, Resource, fields from model import db, User -import jwt -encoded = jwt.encode({"some": "payload"}, "secret", algorithm="HS256") -decoded = jwt.decode(encoded, "secret", algorithms=["HS256"]) + api = Namespace("Signup", description="Signup user", path="/") signup_model = api.model( diff --git a/config.py b/config.py index 1d9dd01f..db074343 100644 --- a/config.py +++ b/config.py @@ -2,3 +2,4 @@ from os import environ FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") +secret = environ.get("secret") From e2d74808104ae9a6d80e13630e413c04c2e3009b Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 26 Sep 2023 10:18:45 -0700 Subject: [PATCH 156/505] wip: login and signup authorization --- apis/contributor.py | 23 +++++++++---- apis/login.py | 84 +++++++++++++++++++++++++++++++++++++-------- apis/signup_user.py | 4 +-- app.py | 11 +++++- 4 files changed, 97 insertions(+), 25 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 9d00ea79..073dafeb 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,5 +1,5 @@ from flask_restx import Namespace, Resource, fields - +from flask 
import request from model import StudyContributor, Study, db api = Namespace("Contributor", description="Contributors", path="/") @@ -25,18 +25,27 @@ def get(self, study_id: int): contributors = StudyContributor.query.all() return [c.to_dict() for c in contributors] - def put(self, study_id: int): - contributors = StudyContributor.query.all() - @api.route("/study//contributor/") -class DeleteContributor(Resource): +class ContributorResource(Resource): + @api.doc("contributor update") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, user_id): + data = request.json + contributors = StudyContributor.query.filter_by(study_id=study_id, user_id=user_id) + contributors.permission = data + db.session.commit() + return contributors.permission + @api.doc("contributor delete") @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - study = Study.query.get(study_id) - contributors = study.study_contributor + users = g.user.query.get() + contributors = StudyContributor.query.get(study_id=study_id, user_id=user_id) db.session.delete(contributors) db.session.commit() return 204 +# will need to implement it in all endpoints for which that permission is relevant +# Permissions should be only a database query and conditional statement. 
Failing permissions should result in a 403 diff --git a/apis/login.py b/apis/login.py index de63a857..80f75cd0 100644 --- a/apis/login.py +++ b/apis/login.py @@ -1,17 +1,18 @@ -from flask import Response, jsonify, request, make_response +from flask import jsonify, request, make_response, g from flask_restx import Namespace, Resource, fields from model import User +import uuid +from datetime import timezone +import datetime +from dateutil.parser import parse import jwt import config api = Namespace("Login", description="Login", path="/") -login_model = api.model( - "Login", - { - "email_address": fields.String(required=True), - } -) + +class AccessDenied(Exception): + pass @api.route("/auth/login") @@ -23,17 +24,70 @@ def post(self): data = request.json email_address = data["email_address"] user = User.query.filter_by(email_address=email_address).one_or_none() - if not user: - return "Email is not correct", 403 validate_pass = user.check_password(data["password"]) - if not validate_pass: - return "Password is not correct", 401 + if not user or validate_pass: + return "Invalid credentials", 401 else: if len(config.secret) < 14: raise "secret key should contain at least 14 characters" - encoded_jwt_code = jwt.encode({"user": user.id}, config.secret, algorithm="HS256") - resp = make_response('Setting the cookie') - resp.set_cookie('test', encoded_jwt_code) - return f"Authentication is successful, {resp}" + encoded_jwt_code = jwt.encode( + { + "user": user.id, + "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=60)}, + config.secret, + algorithm="HS256") + resp = make_response(user.to_dict()) + resp.set_cookie('user', encoded_jwt_code) + resp.status = 200 + return resp + + +@api.route("/auth/logout") +class Logout(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self): + resp = make_response() + resp.status = 204 + resp.delete_cookie('user') + return resp + + +@api.route("/auth/current-users") +class 
CurrentUsers(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + def get(self): + if not g.user: + return None + return g.user.to_dict() + + +def authentication(): + if 'user' not in request.cookies: + return + token = request.cookies.get("user") + try: + decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) + except jwt.ExpiredSignatureError: + return + user = User.query.get(decoded["user"]) + g.user = user + + +def authorization(): + # white listed routes + public_routes = ["/auth/token", "/auth/login", "/auth/sign-up"] + if request.path in public_routes: + return + if g.user: + return + raise AccessDenied("Access denied") +# def permission(): +# if not g.user: +# return +# if g.user.permission == "viewer": +# pass +# # do not allow to make operations on endpoints \ No newline at end of file diff --git a/apis/signup_user.py b/apis/signup_user.py index c55c0d1f..ce009370 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -20,8 +20,8 @@ ) -@api.route("/auth/signup") -class SignupUser(Resource): +@api.route("/auth/sign-up") +class SignUpUser(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(signup_model) diff --git a/app.py b/app.py index 461b305e..0698f6c4 100644 --- a/app.py +++ b/app.py @@ -6,7 +6,7 @@ import model from apis import api from flask_bcrypt import Bcrypt - +from apis.login import authentication, authorization # from pyfairdatatools import __version__ @@ -59,6 +59,15 @@ def create_app(): # """Create the database schema.""" # model.db.create_all() + @app.before_request + def on_before_request(): + authentication() + try: + authorization() + except: + return 'Access denied', 403 + # catch access denied error + @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() From 9462a4aa12d4eda4131c7ca537a43628592ed0e6 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 26 Sep 2023 17:19:29 +0000 Subject: [PATCH 157/505] 
=?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 6 +++++- apis/login.py | 16 ++++++++++------ app.py | 2 +- model/study_contributor.py | 2 +- 4 files changed, 17 insertions(+), 9 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 073dafeb..49d59cef 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -33,7 +33,9 @@ class ContributorResource(Resource): @api.response(400, "Validation Error") def put(self, study_id: int, user_id): data = request.json - contributors = StudyContributor.query.filter_by(study_id=study_id, user_id=user_id) + contributors = StudyContributor.query.filter_by( + study_id=study_id, user_id=user_id + ) contributors.permission = data db.session.commit() return contributors.permission @@ -47,5 +49,7 @@ def delete(self, study_id: int, user_id: int): db.session.delete(contributors) db.session.commit() return 204 + + # will need to implement it in all endpoints for which that permission is relevant # Permissions should be only a database query and conditional statement. 
Failing permissions should result in a 403 diff --git a/apis/login.py b/apis/login.py index 80f75cd0..3d7afd46 100644 --- a/apis/login.py +++ b/apis/login.py @@ -8,6 +8,7 @@ import jwt import config + api = Namespace("Login", description="Login", path="/") @@ -33,11 +34,14 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=60)}, + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=60), + }, config.secret, - algorithm="HS256") + algorithm="HS256", + ) resp = make_response(user.to_dict()) - resp.set_cookie('user', encoded_jwt_code) + resp.set_cookie("user", encoded_jwt_code) resp.status = 200 return resp @@ -49,7 +53,7 @@ class Logout(Resource): def post(self): resp = make_response() resp.status = 204 - resp.delete_cookie('user') + resp.delete_cookie("user") return resp @@ -64,7 +68,7 @@ def get(self): def authentication(): - if 'user' not in request.cookies: + if "user" not in request.cookies: return token = request.cookies.get("user") try: @@ -90,4 +94,4 @@ def authorization(): # return # if g.user.permission == "viewer": # pass -# # do not allow to make operations on endpoints \ No newline at end of file +# # do not allow to make operations on endpoints diff --git a/app.py b/app.py index 0698f6c4..d619b9e3 100644 --- a/app.py +++ b/app.py @@ -65,7 +65,7 @@ def on_before_request(): try: authorization() except: - return 'Access denied', 403 + return "Access denied", 403 # catch access denied error @app.cli.command("destroy-schema") diff --git a/model/study_contributor.py b/model/study_contributor.py index 5aa6a914..3307e6c9 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -25,5 +25,5 @@ def to_dict(self): return { "permission": self.permission, "user_id": self.user_id, - "study_id": self.study_id + "study_id": self.study_id, } From 0ab71654ce85f12d83c96e2903f2ea73835b78c5 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 26 
Sep 2023 11:26:39 -0700 Subject: [PATCH 158/505] fix: signup error --- apis/contributor.py | 5 +++-- apis/login.py | 4 +++- app.py | 6 ++---- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 073dafeb..ac371caf 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,6 +1,6 @@ from flask_restx import Namespace, Resource, fields from flask import request -from model import StudyContributor, Study, db +from model import StudyContributor, Study, db, User api = Namespace("Contributor", description="Contributors", path="/") @@ -42,7 +42,8 @@ def put(self, study_id: int, user_id): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - users = g.user.query.get() + user = User.query.filter_by(user_id=user_id, study_id=study_id) + # if user.permission contributors = StudyContributor.query.get(study_id=study_id, user_id=user_id) db.session.delete(contributors) db.session.commit() diff --git a/apis/login.py b/apis/login.py index 80f75cd0..ee5a6911 100644 --- a/apis/login.py +++ b/apis/login.py @@ -64,8 +64,10 @@ def get(self): def authentication(): + g.user = None if 'user' not in request.cookies: return + # if 'user' in token = request.cookies.get("user") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) @@ -77,7 +79,7 @@ def authentication(): def authorization(): # white listed routes - public_routes = ["/auth/token", "/auth/login", "/auth/sign-up"] + public_routes = ["/auth/token", "/auth/login", "/auth/signup"] if request.path in public_routes: return if g.user: diff --git a/app.py b/app.py index 0698f6c4..1dedf541 100644 --- a/app.py +++ b/app.py @@ -62,10 +62,8 @@ def create_app(): @app.before_request def on_before_request(): authentication() - try: - authorization() - except: - return 'Access denied', 403 + authorization() + # catch access denied error @app.cli.command("destroy-schema") From 
62a2656164030390fb6bd3f07baba2b7e1021649 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 26 Sep 2023 18:27:47 +0000 Subject: [PATCH 159/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/login.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/apis/login.py b/apis/login.py index ee5a6911..f55181f5 100644 --- a/apis/login.py +++ b/apis/login.py @@ -8,6 +8,7 @@ import jwt import config + api = Namespace("Login", description="Login", path="/") @@ -33,11 +34,14 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=60)}, + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=60), + }, config.secret, - algorithm="HS256") + algorithm="HS256", + ) resp = make_response(user.to_dict()) - resp.set_cookie('user', encoded_jwt_code) + resp.set_cookie("user", encoded_jwt_code) resp.status = 200 return resp @@ -49,7 +53,7 @@ class Logout(Resource): def post(self): resp = make_response() resp.status = 204 - resp.delete_cookie('user') + resp.delete_cookie("user") return resp @@ -65,7 +69,7 @@ def get(self): def authentication(): g.user = None - if 'user' not in request.cookies: + if "user" not in request.cookies: return # if 'user' in token = request.cookies.get("user") @@ -92,4 +96,4 @@ def authorization(): # return # if g.user.permission == "viewer": # pass -# # do not allow to make operations on endpoints \ No newline at end of file +# # do not allow to make operations on endpoints From dc70c839f2b18142b20ac04a64f9c9249a73244b Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 26 Sep 2023 12:46:35 -0700 Subject: [PATCH 160/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20docker=20?= =?UTF-8?q?file?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit --- db-docker-compose.yaml | 13 ++----------- dev-docker-compose.yaml | 4 ++-- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/db-docker-compose.yaml b/db-docker-compose.yaml index eaabcefb..3961f8f0 100644 --- a/db-docker-compose.yaml +++ b/db-docker-compose.yaml @@ -9,14 +9,5 @@ services: POSTGRES_DB: fairhub_local ports: - 5432:5432 -# volumes: -# - ./postgres-data:/var/lib/postgresql/data -# - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql - # pgadmin: - # image: dpage/pgadmin4 - # restart: always - # environment: - # PGADMIN_DEFAULT_EMAIL: admin@admin.com - # PGADMIN_DEFAULT_PASSWORD: root - # ports: - # - 5050:80 \ No newline at end of file + # volumes: + # - ./postgres-data:/var/lib/postgresql/data \ No newline at end of file diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index f4589595..5a5697d0 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -33,6 +33,6 @@ services: interval: 5s timeout: 5s retries: 5 - volumes: + # volumes: # - ./postgres-data:/var/lib/postgresql/data - - ./sql/init.sql:/docker-entrypoint-initdb.d/1-schema.sql + # - ./sql/init_timezones.sql:/docker-entrypoint-initdb.d/1-schema.sql From 8fe2f6252c964ce99b42b8780bdcc57cfe6349c8 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 26 Sep 2023 16:57:40 -0700 Subject: [PATCH 161/505] fix: signup error --- apis/login.py | 40 ++++++++++++++++++++++++++++++---------- apis/participant.py | 17 +++++++++++++---- apis/study.py | 6 ++++++ app.py | 1 + 4 files changed, 50 insertions(+), 14 deletions(-) diff --git a/apis/login.py b/apis/login.py index ee5a6911..66e2ad24 100644 --- a/apis/login.py +++ b/apis/login.py @@ -1,6 +1,6 @@ from flask import jsonify, request, make_response, g from flask_restx import Namespace, Resource, fields -from model import User +from model import User, StudyContributor import uuid from datetime import timezone import datetime @@ -24,8 +24,10 @@ def post(self): data = request.json email_address = 
data["email_address"] user = User.query.filter_by(email_address=email_address).one_or_none() + if not user: + return "Invalid credentials", 401 validate_pass = user.check_password(data["password"]) - if not user or validate_pass: + if not validate_pass: return "Invalid credentials", 401 else: if len(config.secret) < 14: @@ -37,7 +39,7 @@ def post(self): config.secret, algorithm="HS256") resp = make_response(user.to_dict()) - resp.set_cookie('user', encoded_jwt_code) + resp.set_cookie('user', encoded_jwt_code, secure=True, httponly=True, samesite='lax') resp.status = 200 return resp @@ -79,7 +81,8 @@ def authentication(): def authorization(): # white listed routes - public_routes = ["/auth/token", "/auth/login", "/auth/signup"] + public_routes = ["/auth/token", "/auth/login", "/auth/sign-up", "/docs", "/echo"] + # print(request.path) if request.path in public_routes: return if g.user: @@ -87,9 +90,26 @@ def authorization(): raise AccessDenied("Access denied") -# def permission(): -# if not g.user: -# return -# if g.user.permission == "viewer": -# pass -# # do not allow to make operations on endpoints \ No newline at end of file +# def is_viewer(study_id: int): +# contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() +# if contributor.permission == "viewer": +# return "Access denied", 403 +# +# +# def is_admin(study_id: int): +# contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() +# if contributor.permission == "admin": +# return "Access denied", 403 +# +# +# def is_editor(study_id: int): +# contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() +# +# if contributor.permission == "admin": +# return "Access denied", 403 + + +def is_granted(permission: str, study_id: int): + contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() + return contributor.permission == permission + diff --git a/apis/participant.py 
b/apis/participant.py index 97e7e259..f160a159 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -1,8 +1,8 @@ -from flask import Response, jsonify, request +from flask import Response, jsonify, request, g from flask_restx import Namespace, Resource, fields -from model import Participant, Study, db - +from model import Participant, Study, db, StudyContributor +from .login import is_granted api = Namespace("Participant", description="Participant operations", path="/") participant_model = api.model( @@ -31,8 +31,10 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(participant_model) + # @api.marshal_with(participant_model) def post(self, study_id: int): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 study = Study.query.get(study_id) add_participant = Participant.from_data(request.json, study) db.session.add(add_participant) @@ -47,6 +49,9 @@ class UpdateParticipant(Resource): @api.response(400, "Validation Error") @api.marshal_with(participant_model) def put(self, study_id, participant_id: int): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 + is_granted('viewer', study_id) update_participant = Participant.query.get(participant_id) update_participant.update(request.json) db.session.commit() @@ -55,6 +60,10 @@ def put(self, study_id, participant_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id, participant_id: int): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 + is_granted('viewer', study_id) + delete_participant = Participant.query.get(participant_id) db.session.delete(delete_participant) db.session.commit() diff --git a/apis/study.py b/apis/study.py index d7bfd9b2..77472a23 100644 --- a/apis/study.py +++ b/apis/study.py @@ -2,6 +2,7 @@ from flask_restx import Namespace, Resource, fields from model import 
Study, db +from .login import is_granted api = Namespace("Study", description="Study operations", path="/") @@ -43,6 +44,7 @@ def get(self): return [s.to_dict() for s in studies] def post(self): + add_study = Study.from_data(request.json) db.session.add(add_study) db.session.commit() @@ -60,12 +62,16 @@ def get(self, study_id: int): return study1.to_dict() def put(self, study_id: int): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 update_study = Study.query.get(study_id) update_study.update(request.json) db.session.commit() return update_study.to_dict() def delete(self, study_id: int): + if not is_granted('admin', study_id): + return "Access denied, you can not delete study", 403 delete_study = Study.query.get(study_id) for d in delete_study.dataset: for version in d.dataset_versions: diff --git a/app.py b/app.py index 1dedf541..2aae53a5 100644 --- a/app.py +++ b/app.py @@ -62,6 +62,7 @@ def create_app(): @app.before_request def on_before_request(): authentication() + authorization() # catch access denied error From bffa84f6513d7ac124d69b044a379f02b615540a Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 26 Sep 2023 23:58:38 +0000 Subject: [PATCH 162/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/login.py | 21 ++++++++++++++------- apis/participant.py | 11 ++++++----- apis/study.py | 5 ++--- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/apis/login.py b/apis/login.py index 66e2ad24..d26dfac5 100644 --- a/apis/login.py +++ b/apis/login.py @@ -8,6 +8,7 @@ import jwt import config + api = Namespace("Login", description="Login", path="/") @@ -35,11 +36,16 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=60)}, + "exp": datetime.datetime.now(timezone.utc) + + 
datetime.timedelta(minutes=60), + }, config.secret, - algorithm="HS256") + algorithm="HS256", + ) resp = make_response(user.to_dict()) - resp.set_cookie('user', encoded_jwt_code, secure=True, httponly=True, samesite='lax') + resp.set_cookie( + "user", encoded_jwt_code, secure=True, httponly=True, samesite="lax" + ) resp.status = 200 return resp @@ -51,7 +57,7 @@ class Logout(Resource): def post(self): resp = make_response() resp.status = 204 - resp.delete_cookie('user') + resp.delete_cookie("user") return resp @@ -67,7 +73,7 @@ def get(self): def authentication(): g.user = None - if 'user' not in request.cookies: + if "user" not in request.cookies: return # if 'user' in token = request.cookies.get("user") @@ -110,6 +116,7 @@ def authorization(): def is_granted(permission: str, study_id: int): - contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() + contributor = StudyContributor.query.filter_by( + user_id=g.user.id, study_id=study_id + ).first() return contributor.permission == permission - diff --git a/apis/participant.py b/apis/participant.py index f160a159..f297bc0b 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -3,6 +3,7 @@ from model import Participant, Study, db, StudyContributor from .login import is_granted + api = Namespace("Participant", description="Participant operations", path="/") participant_model = api.model( @@ -33,7 +34,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") # @api.marshal_with(participant_model) def post(self, study_id: int): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 study = Study.query.get(study_id) add_participant = Participant.from_data(request.json, study) @@ -49,9 +50,9 @@ class UpdateParticipant(Resource): @api.response(400, "Validation Error") @api.marshal_with(participant_model) def put(self, study_id, participant_id: int): - if is_granted('viewer', study_id): + if 
is_granted("viewer", study_id): return "Access denied, you can not modify", 403 - is_granted('viewer', study_id) + is_granted("viewer", study_id) update_participant = Participant.query.get(participant_id) update_participant.update(request.json) db.session.commit() @@ -60,9 +61,9 @@ def put(self, study_id, participant_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id, participant_id: int): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 - is_granted('viewer', study_id) + is_granted("viewer", study_id) delete_participant = Participant.query.get(participant_id) db.session.delete(delete_participant) diff --git a/apis/study.py b/apis/study.py index 77472a23..5ed843be 100644 --- a/apis/study.py +++ b/apis/study.py @@ -44,7 +44,6 @@ def get(self): return [s.to_dict() for s in studies] def post(self): - add_study = Study.from_data(request.json) db.session.add(add_study) db.session.commit() @@ -62,7 +61,7 @@ def get(self, study_id: int): return study1.to_dict() def put(self, study_id: int): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 update_study = Study.query.get(study_id) update_study.update(request.json) @@ -70,7 +69,7 @@ def put(self, study_id: int): return update_study.to_dict() def delete(self, study_id: int): - if not is_granted('admin', study_id): + if not is_granted("admin", study_id): return "Access denied, you can not delete study", 403 delete_study = Study.query.get(study_id) for d in delete_study.dataset: From 66857aa5c8e83a7b315e15dbd83af7a1f34dd0dd Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 26 Sep 2023 17:09:50 -0700 Subject: [PATCH 163/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20route=20?= =?UTF-8?q?whitelist?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/login.py | 37 
++++++++++++++++++++++++++----------- dev-docker-compose.yaml | 1 + 2 files changed, 27 insertions(+), 11 deletions(-) diff --git a/apis/login.py b/apis/login.py index 66e2ad24..53245447 100644 --- a/apis/login.py +++ b/apis/login.py @@ -8,6 +8,7 @@ import jwt import config + api = Namespace("Login", description="Login", path="/") @@ -35,11 +36,16 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=60)}, + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=60), + }, config.secret, - algorithm="HS256") + algorithm="HS256", + ) resp = make_response(user.to_dict()) - resp.set_cookie('user', encoded_jwt_code, secure=True, httponly=True, samesite='lax') + resp.set_cookie( + "user", encoded_jwt_code, secure=True, httponly=True, samesite="lax" + ) resp.status = 200 return resp @@ -51,7 +57,7 @@ class Logout(Resource): def post(self): resp = make_response() resp.status = 204 - resp.delete_cookie('user') + resp.delete_cookie("user") return resp @@ -67,7 +73,7 @@ def get(self): def authentication(): g.user = None - if 'user' not in request.cookies: + if "user" not in request.cookies: return # if 'user' in token = request.cookies.get("user") @@ -81,10 +87,18 @@ def authentication(): def authorization(): # white listed routes - public_routes = ["/auth/token", "/auth/login", "/auth/sign-up", "/docs", "/echo"] - # print(request.path) - if request.path in public_routes: - return + public_routes = [ + "/auth", + "/docs", + "/echo", + "/swaggerui", + "/swagger.json", + ] + + for route in public_routes: + if request.path.startswith(route): + return + if g.user: return raise AccessDenied("Access denied") @@ -110,6 +124,7 @@ def authorization(): def is_granted(permission: str, study_id: int): - contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() + contributor = StudyContributor.query.filter_by( + user_id=g.user.id, study_id=study_id 
+ ).first() return contributor.permission == permission - diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 5a5697d0..7e0303dc 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -16,6 +16,7 @@ services: FLASK_ENV: development FLASK_DEBUG: 1 FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/postgres}" + secret: "aaldkljla;jsdjklajlkkljdkljakjl;d;" depends_on: database: condition: service_healthy From 144fa0ffdaf0f4bd7c29dad602590744ad830dde Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 26 Sep 2023 22:07:53 -0700 Subject: [PATCH 164/505] fix: signup credentials updated --- model/user.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/model/user.py b/model/user.py index b4b0eb56..5c785fda 100644 --- a/model/user.py +++ b/model/user.py @@ -16,12 +16,12 @@ def __init__(self, password, data): id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) username = db.Column(db.String, nullable=False, unique=True) - first_name = db.Column(db.String, nullable=False) - last_name = db.Column(db.String, nullable=False) - orcid = db.Column(db.String, nullable=False) + first_name = db.Column(db.String, nullable=True) + last_name = db.Column(db.String, nullable=True) + orcid = db.Column(db.String, nullable=True) hash = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - institution = db.Column(db.String, nullable=False) + institution = db.Column(db.String, nullable=True) study_contributors = db.relationship("StudyContributor", back_populates="user") def to_dict(self): @@ -44,18 +44,20 @@ def from_data(data: dict): def update(self, data): self.email_address = data["email_address"] - self.username = data["email_address"] - self.first_name = data["first_name"] - self.last_name = data["last_name"] - self.orcid = data["orcid"] - self.institution = data["institution"] + 
# self.username = data["email_address"] + # self.first_name = data["first_name"] + # self.last_name = data["last_name"] + # self.orcid = data["orcid"] + # self.institution = data["institution"] def set_password(self, password, data): + """setting bcrypt passwords""" hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") self.hash = hashed_password def check_password(self, password): - # TODO check password lenght and having uppercase letter + """validates password and bcrypt hashed password""" + # TODO check password length and make uppercase letter hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") is_valid = app.bcrypt.check_password_hash(hashed_password, password) return is_valid From 2fefcddaf35bfd5fee314b823423de5355574f83 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 10:02:05 -0700 Subject: [PATCH 165/505] fix: authentication models updated --- model/__init__.py | 6 +++++ model/email_verification.py | 23 +++++++++++++++++++ model/token_blacklist.py | 31 +++++++++++++++++++++++++ model/user.py | 27 +++++++++++----------- model/user_details.py | 46 +++++++++++++++++++++++++++++++++++++ 5 files changed, 119 insertions(+), 14 deletions(-) create mode 100644 model/email_verification.py create mode 100644 model/token_blacklist.py create mode 100644 model/user_details.py diff --git a/model/__init__.py b/model/__init__.py index da7b573b..fe179768 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -7,6 +7,9 @@ from .user import User from .dataset import Dataset +from .email_verification import EmailVerification +from .token_blacklist import TokenBlacklist +from .user_details import UserDetails from .dataset_contributor import DatasetContributor from .invited_study_contributor import StudyInvitedContributor from .study_contributor import StudyContributor @@ -110,4 +113,7 @@ "Arm", "StudyInvitedContributor", "StudyException", + "EmailVerification", + "TokenBlacklist", + "UserDetails" ] diff --git 
a/model/email_verification.py b/model/email_verification.py new file mode 100644 index 00000000..19a640a6 --- /dev/null +++ b/model/email_verification.py @@ -0,0 +1,23 @@ +from datetime import timezone +import datetime +from .db import db + + +class EmailVerification(db.Model): + def __init__(self): + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + __tablename__ = "email_verification" + id = db.Column(db.CHAR(36), primary_key=True) + token = db.Column(db.CHAR(36), nullable=False) + created_at = db.Column(db.CHAR(36), nullable=False) + + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + user = db.relationship("User", back_populates="email_verification") + + def to_dict(self): + return { + "id": self.id, + "user_id": self.user_id, + "token": self.token, + "created_at": self.created_at + } diff --git a/model/token_blacklist.py b/model/token_blacklist.py new file mode 100644 index 00000000..1184d3b8 --- /dev/null +++ b/model/token_blacklist.py @@ -0,0 +1,31 @@ +import uuid +import model +from datetime import timezone +from .db import db +import datetime + + +class TokenBlacklist(db.Model): + + __tablename__ = "token_blacklist" + jti = db.Column(db.CHAR(36), primary_key=True) + exp = db.Column(db.String, nullable=False) + + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + user = db.relationship("User", back_populates="token_blacklist") + + def to_dict(self): + return { + "jti": self.jti, + "exp": self.exp, + } + + @staticmethod + def from_data(data: dict): + token_blacklist = TokenBlacklist() + token_blacklist.update(data) + return token_blacklist + + def update(self, data): + self.jti = data["jti"] + self.exp = data["exp"] diff --git a/model/user.py b/model/user.py index 5c785fda..4804f5a4 100644 --- a/model/user.py +++ b/model/user.py @@ -16,24 +16,23 @@ def __init__(self, password, data): id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, 
unique=True) username = db.Column(db.String, nullable=False, unique=True) - first_name = db.Column(db.String, nullable=True) - last_name = db.Column(db.String, nullable=True) - orcid = db.Column(db.String, nullable=True) hash = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - institution = db.Column(db.String, nullable=True) + email_verified = db.Column(db.String, nullable=True) + study_contributors = db.relationship("StudyContributor", back_populates="user") + email_verification = db.relationship("EmailVerification", back_populates="user") + token_blacklist = db.relationship("TokenBlacklist", back_populates="user") + user_details = db.relationship("UserDetails", back_populates="user") def to_dict(self): return { "id": self.id, "email_address": self.email_address, - "username": self.email_address, - "first_name": self.first_name, - "last_name": self.last_name, - "orcid": self.orcid, + "username": self.username, + "hash": self.hash, "created_at": self.created_at, - "institution": self.institution, + "email_verified": self.email_verified, } @staticmethod @@ -44,11 +43,11 @@ def from_data(data: dict): def update(self, data): self.email_address = data["email_address"] - # self.username = data["email_address"] - # self.first_name = data["first_name"] - # self.last_name = data["last_name"] - # self.orcid = data["orcid"] - # self.institution = data["institution"] + self.username = data["email_address"] + # self.email_verified = data["email_verified"] + # self.username = data["username"] + # self.hash = data["hash"] + # self.created_at = data["created_at"] def set_password(self, password, data): """setting bcrypt passwords""" diff --git a/model/user_details.py b/model/user_details.py new file mode 100644 index 00000000..4b8a2cbe --- /dev/null +++ b/model/user_details.py @@ -0,0 +1,46 @@ +import uuid +from .db import db + + +class UserDetails(db.Model): + def __init__(self): + self.id = str(uuid.uuid4()) + + __tablename__ = 
"user_details" + id = db.Column(db.CHAR(36), primary_key=True) + first_name = db.Column(db.String, nullable=True) + last_name = db.Column(db.String, nullable=True) + institution = db.Column(db.String, nullable=True) + orcid = db.Column(db.String, nullable=True) + location = db.Column(db.String, nullable=True) + + timezone = db.Column(db.String, nullable=True) + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + user = db.relationship("User", back_populates="user_details") + + def to_dict(self): + return { + "id": self.id, + "first_name": self.first_name, + "last_name": self.last_name, + "institution": self.institution, + "orcid": self.orcid, + "location": self.location, + "timezone": self.timezone, + } + + @staticmethod + def from_data(data: dict): + user = UserDetails() + user.update(data) + return user + + def update(self, data): + self.first_name = data["first_name"] + self.last_name = data["last_name"] + self.institution = data["institution"] + self.orcid = data["orcid"] + self.location = data["location"] + self.timezone = data["timezone"] + + From 7e395a6205e1c44ea1a72b74fc9794cb3c5d765c Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 27 Sep 2023 17:02:46 +0000 Subject: [PATCH 166/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/__init__.py | 2 +- model/email_verification.py | 3 ++- model/token_blacklist.py | 1 - model/user_details.py | 2 -- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/model/__init__.py b/model/__init__.py index fe179768..772b980f 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -115,5 +115,5 @@ "StudyException", "EmailVerification", "TokenBlacklist", - "UserDetails" + "UserDetails", ] diff --git a/model/email_verification.py b/model/email_verification.py index 19a640a6..63d481fd 100644 --- a/model/email_verification.py +++ 
b/model/email_verification.py @@ -6,6 +6,7 @@ class EmailVerification(db.Model): def __init__(self): self.created_at = datetime.datetime.now(timezone.utc).timestamp() + __tablename__ = "email_verification" id = db.Column(db.CHAR(36), primary_key=True) token = db.Column(db.CHAR(36), nullable=False) @@ -19,5 +20,5 @@ def to_dict(self): "id": self.id, "user_id": self.user_id, "token": self.token, - "created_at": self.created_at + "created_at": self.created_at, } diff --git a/model/token_blacklist.py b/model/token_blacklist.py index 1184d3b8..0d8a01e1 100644 --- a/model/token_blacklist.py +++ b/model/token_blacklist.py @@ -6,7 +6,6 @@ class TokenBlacklist(db.Model): - __tablename__ = "token_blacklist" jti = db.Column(db.CHAR(36), primary_key=True) exp = db.Column(db.String, nullable=False) diff --git a/model/user_details.py b/model/user_details.py index 4b8a2cbe..c4e6298a 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -42,5 +42,3 @@ def update(self, data): self.orcid = data["orcid"] self.location = data["location"] self.timezone = data["timezone"] - - From 39ab49fbadf216239ab7ca0160a2d5ea83d41444 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 10:03:20 -0700 Subject: [PATCH 167/505] wip: permissions are set up for editor and viewer and admin --- apis/contributor.py | 19 ++++++++++++------- apis/dataset.py | 13 +++++++++++++ apis/invited_contributor.py | 5 +++++ apis/login.py | 25 ++++--------------------- apis/signup_user.py | 7 ++----- 5 files changed, 36 insertions(+), 33 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 767a2d77..39d41d56 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields -from flask import request +from flask import request, g from model import StudyContributor, Study, db, User - +from .login import is_granted api = Namespace("Contributor", description="Contributors", path="/") @@ -32,6 +32,11 @@ class 
ContributorResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, user_id): + if not is_granted('admin', study_id): + return "Access denied, you can not modify permissions", 403 + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 + data = request.json contributors = StudyContributor.query.filter_by( study_id=study_id, user_id=user_id @@ -44,13 +49,13 @@ def put(self, study_id: int, user_id): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - user = User.query.filter_by(user_id=user_id, study_id=study_id) - # if user.permission - contributors = StudyContributor.query.get(study_id=study_id, user_id=user_id) - db.session.delete(contributors) + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 + contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id) + db.session.delete(contributor) db.session.commit() + print(contributor) return 204 - # will need to implement it in all endpoints for which that permission is relevant # Permissions should be only a database query and conditional statement. 
Failing permissions should result in a 403 diff --git a/apis/dataset.py b/apis/dataset.py index d0270bbb..f276bd68 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -2,6 +2,7 @@ from flask_restx import Namespace, Resource, fields from model import Dataset, Version, Participant, Study, db +from .login import is_granted api = Namespace("Dataset", description="Dataset operations", path="/") @@ -48,6 +49,8 @@ def get(self, study_id): @api.doc("update dataset") # @api.marshal_with(dataset) def post(self, study_id): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 study = Study.query.get(study_id) # todo if study.participant id== different study Throw error dataset_ = Dataset.from_data(study, request.json) @@ -62,6 +65,8 @@ def post(self, study_id): @api.response(400, "Validation Error") class DatasetResource(Resource): def put(self, study_id, dataset_id): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 data = request.json data_obj = Dataset.query.get(dataset_id) data_obj.update(data) @@ -69,6 +74,8 @@ def put(self, study_id, dataset_id): return data_obj.to_dict() def delete(self, study_id, dataset_id): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 data_obj = Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: db.session.delete(version) @@ -113,12 +120,16 @@ def get(self, study_id, dataset_id, version_id): return dataset_version.to_dict() def put(self, study_id, dataset_id, version_id): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 data_version_obj = Version.query.get(version_id) data_version_obj.update(request.json) db.session.commit() return jsonify(data_version_obj.to_dict()) def delete(self, study_id, dataset_id, version_id): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 data_obj = Dataset.query.get(dataset_id) for version in 
data_obj.dataset_versions: db.session.delete(version) @@ -133,6 +144,8 @@ def delete(self, study_id, dataset_id, version_id): @api.response(400, "Validation Error") class VersionList(Resource): def post(self, study_id: int, dataset_id: int): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 data = request.json data["participants"] = [Participant.query.get(i) for i in data["participants"]] data_obj = Dataset.query.get(dataset_id) diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 38a94d3f..ff40513d 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -8,6 +8,7 @@ StudyException, ) from flask import request +from .login import is_granted api = Namespace("Invited_contributors", description="Invited contributors", path="/") @@ -29,7 +30,11 @@ class AddInvitedContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): + if is_granted('viewer', study_id): + return "Access denied, you can not modify", 403 # try: + if is_granted('editor', study_id): + return "Access denied, you can not invite other contributors", 403 study_obj = Study.query.get(study_id) data = request.json email_address = data["email_address"] diff --git a/apis/login.py b/apis/login.py index 53245447..a88fa0f7 100644 --- a/apis/login.py +++ b/apis/login.py @@ -79,13 +79,16 @@ def authentication(): token = request.cookies.get("user") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) + except jwt.ExpiredSignatureError: return user = User.query.get(decoded["user"]) g.user = user + decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) def authorization(): + """check whether url is allowed to be reached""" # white listed routes public_routes = [ "/auth", @@ -94,36 +97,16 @@ def authorization(): "/swaggerui", "/swagger.json", ] - for route in public_routes: if request.path.startswith(route): return - if g.user: return 
raise AccessDenied("Access denied") -# def is_viewer(study_id: int): -# contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() -# if contributor.permission == "viewer": -# return "Access denied", 403 -# -# -# def is_admin(study_id: int): -# contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() -# if contributor.permission == "admin": -# return "Access denied", 403 -# -# -# def is_editor(study_id: int): -# contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).first() -# -# if contributor.permission == "admin": -# return "Access denied", 403 - - def is_granted(permission: str, study_id: int): + """filters users and checks whether current permission equal to passed permission""" contributor = StudyContributor.query.filter_by( user_id=g.user.id, study_id=study_id ).first() diff --git a/apis/signup_user.py b/apis/signup_user.py index ce009370..2d466efe 100644 --- a/apis/signup_user.py +++ b/apis/signup_user.py @@ -10,17 +10,14 @@ { "id": fields.String(required=True), "email_address": fields.String(required=True), + "email_verified": fields.String(required=True), "username": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "orcid": fields.String(required=True), "created_at": fields.Integer(required=True), - "institution": fields.String(required=True), }, ) -@api.route("/auth/sign-up") +@api.route("/auth/signup") class SignUpUser(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") From 361943dc8f70f7bba074929b8046cca217895d63 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 27 Sep 2023 17:04:10 +0000 Subject: [PATCH 168/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 12 ++++++++---- apis/dataset.py 
| 12 ++++++------ apis/invited_contributor.py | 4 ++-- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 39d41d56..1cff5252 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -2,6 +2,7 @@ from flask import request, g from model import StudyContributor, Study, db, User from .login import is_granted + api = Namespace("Contributor", description="Contributors", path="/") @@ -32,9 +33,9 @@ class ContributorResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, user_id): - if not is_granted('admin', study_id): + if not is_granted("admin", study_id): return "Access denied, you can not modify permissions", 403 - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data = request.json @@ -49,13 +50,16 @@ def put(self, study_id: int, user_id): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 - contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id) + contributor = StudyContributor.query.filter_by( + user_id=g.user.id, study_id=study_id + ) db.session.delete(contributor) db.session.commit() print(contributor) return 204 + # will need to implement it in all endpoints for which that permission is relevant # Permissions should be only a database query and conditional statement. 
Failing permissions should result in a 403 diff --git a/apis/dataset.py b/apis/dataset.py index f276bd68..41d28f4e 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -49,7 +49,7 @@ def get(self, study_id): @api.doc("update dataset") # @api.marshal_with(dataset) def post(self, study_id): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 study = Study.query.get(study_id) # todo if study.participant id== different study Throw error @@ -65,7 +65,7 @@ def post(self, study_id): @api.response(400, "Validation Error") class DatasetResource(Resource): def put(self, study_id, dataset_id): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data = request.json data_obj = Dataset.query.get(dataset_id) @@ -74,7 +74,7 @@ def put(self, study_id, dataset_id): return data_obj.to_dict() def delete(self, study_id, dataset_id): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data_obj = Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: @@ -120,7 +120,7 @@ def get(self, study_id, dataset_id, version_id): return dataset_version.to_dict() def put(self, study_id, dataset_id, version_id): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data_version_obj = Version.query.get(version_id) data_version_obj.update(request.json) @@ -128,7 +128,7 @@ def put(self, study_id, dataset_id, version_id): return jsonify(data_version_obj.to_dict()) def delete(self, study_id, dataset_id, version_id): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data_obj = Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: @@ -144,7 +144,7 @@ def delete(self, study_id, dataset_id, version_id): @api.response(400, 
"Validation Error") class VersionList(Resource): def post(self, study_id: int, dataset_id: int): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data = request.json data["participants"] = [Participant.query.get(i) for i in data["participants"]] diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index ff40513d..37be395c 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -30,10 +30,10 @@ class AddInvitedContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 # try: - if is_granted('editor', study_id): + if is_granted("editor", study_id): return "Access denied, you can not invite other contributors", 403 study_obj = Study.query.get(study_id) data = request.json From 8d8574ccb5e820979b251cd7d3c46c7e0fb8e15a Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 10:27:40 -0700 Subject: [PATCH 169/505] fix: user details relation is slightly updated due to 1-to-1 --- model/user_details.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/model/user_details.py b/model/user_details.py index c4e6298a..3ba4a161 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -5,6 +5,12 @@ class UserDetails(db.Model): def __init__(self): self.id = str(uuid.uuid4()) + self.first_name = '' + self.last_name = '' + self.institution = '' + self.location = '' + self.timezone = '' + self.orcid = '' __tablename__ = "user_details" id = db.Column(db.CHAR(36), primary_key=True) From 789db50931b2e401a1c9407f322e2ad37e200a5a Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 10:52:01 -0700 Subject: [PATCH 170/505] fix: refactor login and signup to union name --- apis/__init__.py | 6 +-- apis/{login.py => authentication.py} | 56 
++++++++++++++++++++++++---- apis/contributor.py | 2 +- apis/dataset.py | 2 +- apis/invited_contributor.py | 2 +- apis/participant.py | 2 +- apis/study.py | 2 +- app.py | 2 +- 8 files changed, 56 insertions(+), 18 deletions(-) rename apis/{login.py => authentication.py} (61%) diff --git a/apis/__init__.py b/apis/__init__.py index 52e84cd1..282a256e 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -11,8 +11,7 @@ from .study import api as study_api from .invited_contributor import api as invited_contributors -from .signup_user import api as signup -from .login import api as login +from .authentication import api as authentication from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd @@ -66,8 +65,7 @@ api.add_namespace(dataset_metadata_namespace) api.add_namespace(study_metadata_namespace) -api.add_namespace(signup) -api.add_namespace(login) +api.add_namespace(authentication) @api.route("/echo", endpoint="echo") diff --git a/apis/login.py b/apis/authentication.py similarity index 61% rename from apis/login.py rename to apis/authentication.py index a88fa0f7..bed34fb0 100644 --- a/apis/login.py +++ b/apis/authentication.py @@ -1,21 +1,54 @@ -from flask import jsonify, request, make_response, g +from flask import request, make_response, g from flask_restx import Namespace, Resource, fields -from model import User, StudyContributor -import uuid +from model import StudyContributor from datetime import timezone import datetime from dateutil.parser import parse - +from model import db, User import jwt import config -api = Namespace("Login", description="Login", path="/") +api = Namespace("Authentication", description="Authentication paths", path="/") + +signup_model = api.model( + "Signup", + { + "id": fields.String(required=True), + "email_address": fields.String(required=True), + "email_verified": fields.String(required=True), + "username": fields.String(required=True), + "created_at": 
fields.Integer(required=True), + }, +) class AccessDenied(Exception): pass +@api.route("/auth/signup") +class SignUpUser(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(signup_model) + def post(self): + data = request.json + # TODO data[email doesnt exist then raise error; json validation library + if not data["email_address"]: + raise "Email is not found" + user = User.query.filter_by(email_address=data["email_address"]).one_or_none() + if user: + return "This email address is already in use", 409 + # user = User.query.filter_by(username=data["username"]).one_or_none() + # if user: + # return "This username is already in use", 409 + user = User.from_data(data) + db.session.add(user) + db.session.commit() + return user.to_dict(), 201 + + + @api.route("/auth/login") class Login(Resource): @api.response(200, "Success") @@ -63,6 +96,7 @@ def post(self): @api.route("/auth/current-users") class CurrentUsers(Resource): + """function is used to see all logged users in the system. 
For now, it is used for testing purposes""" @api.response(200, "Success") @api.response(400, "Validation Error") def get(self): @@ -79,12 +113,18 @@ def authentication(): token = request.cookies.get("user") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) - except jwt.ExpiredSignatureError: - return + return "Session time is over", 401 user = User.query.get(decoded["user"]) g.user = user - decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) + # refreshed_encoded = jwt.decode({"exp": datetime.datetime.now(timezone.utc) + # + datetime.timedelta(minutes=60)}, + # config.secret, algorithms=["HS256"]) + # resp = make_response(user.to_dict()) + # resp.set_cookie( + # "user", refreshed_encoded, secure=True, httponly=True, samesite="lax" + # ) + # resp.status = 200 def authorization(): diff --git a/apis/contributor.py b/apis/contributor.py index 39d41d56..24f8ac5f 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,7 +1,7 @@ from flask_restx import Namespace, Resource, fields from flask import request, g from model import StudyContributor, Study, db, User -from .login import is_granted +from .authentication import is_granted api = Namespace("Contributor", description="Contributors", path="/") diff --git a/apis/dataset.py b/apis/dataset.py index f276bd68..744da5bf 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -2,7 +2,7 @@ from flask_restx import Namespace, Resource, fields from model import Dataset, Version, Participant, Study, db -from .login import is_granted +from .authentication import is_granted api = Namespace("Dataset", description="Dataset operations", path="/") diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index ff40513d..7d22e992 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -8,7 +8,7 @@ StudyException, ) from flask import request -from .login import is_granted +from .authentication import is_granted api = Namespace("Invited_contributors", 
description="Invited contributors", path="/") diff --git a/apis/participant.py b/apis/participant.py index f297bc0b..e556fa12 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -2,7 +2,7 @@ from flask_restx import Namespace, Resource, fields from model import Participant, Study, db, StudyContributor -from .login import is_granted +from .authentication import is_granted api = Namespace("Participant", description="Participant operations", path="/") diff --git a/apis/study.py b/apis/study.py index 5ed843be..7c50a64a 100644 --- a/apis/study.py +++ b/apis/study.py @@ -2,7 +2,7 @@ from flask_restx import Namespace, Resource, fields from model import Study, db -from .login import is_granted +from .authentication import is_granted api = Namespace("Study", description="Study operations", path="/") diff --git a/app.py b/app.py index 2aae53a5..7673d4da 100644 --- a/app.py +++ b/app.py @@ -6,7 +6,7 @@ import model from apis import api from flask_bcrypt import Bcrypt -from apis.login import authentication, authorization +from apis.authentication import authentication, authorization # from pyfairdatatools import __version__ From 591b3dc073a71195e254b64bc385edf1e5d46db6 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 27 Sep 2023 17:53:20 +0000 Subject: [PATCH 171/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 4 ++-- apis/contributor.py | 12 ++++++++---- model/user_details.py | 12 ++++++------ 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index bed34fb0..bc5ee645 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -1,4 +1,4 @@ -from flask import request, make_response, g +from flask import request, make_response, g from flask_restx import Namespace, Resource, fields from model import StudyContributor from datetime 
import timezone @@ -48,7 +48,6 @@ def post(self): return user.to_dict(), 201 - @api.route("/auth/login") class Login(Resource): @api.response(200, "Success") @@ -97,6 +96,7 @@ def post(self): @api.route("/auth/current-users") class CurrentUsers(Resource): """function is used to see all logged users in the system. For now, it is used for testing purposes""" + @api.response(200, "Success") @api.response(400, "Validation Error") def get(self): diff --git a/apis/contributor.py b/apis/contributor.py index 24f8ac5f..2d01630a 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -2,6 +2,7 @@ from flask import request, g from model import StudyContributor, Study, db, User from .authentication import is_granted + api = Namespace("Contributor", description="Contributors", path="/") @@ -32,9 +33,9 @@ class ContributorResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, user_id): - if not is_granted('admin', study_id): + if not is_granted("admin", study_id): return "Access denied, you can not modify permissions", 403 - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data = request.json @@ -49,13 +50,16 @@ def put(self, study_id: int, user_id): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - if is_granted('viewer', study_id): + if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 - contributor = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id) + contributor = StudyContributor.query.filter_by( + user_id=g.user.id, study_id=study_id + ) db.session.delete(contributor) db.session.commit() print(contributor) return 204 + # will need to implement it in all endpoints for which that permission is relevant # Permissions should be only a database query and conditional statement. 
Failing permissions should result in a 403 diff --git a/model/user_details.py b/model/user_details.py index 3ba4a161..b0961eaf 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -5,12 +5,12 @@ class UserDetails(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - self.first_name = '' - self.last_name = '' - self.institution = '' - self.location = '' - self.timezone = '' - self.orcid = '' + self.first_name = "" + self.last_name = "" + self.institution = "" + self.location = "" + self.timezone = "" + self.orcid = "" __tablename__ = "user_details" id = db.Column(db.CHAR(36), primary_key=True) From 2fde0a348def8c20462b42faada573247e5b3989 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 12:03:42 -0700 Subject: [PATCH 172/505] feat: set REFRESH token --- apis/authentication.py | 45 +++++++++++++++++++++--------------------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index bed34fb0..fd8c48b5 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -83,6 +83,28 @@ def post(self): return resp +def authentication(): + """it authenticates users to a study, sets access and refresh token. 
+ In addition, it handles error handling of expired token and non existed users""" + g.user = None + if "user" not in request.cookies: + return + # if 'user' in + token = request.cookies.get("user") + try: + decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) + except jwt.ExpiredSignatureError: + # Handle token expiration error here (e.g., re-authenticate the user) + return "Token has expired, please re-authenticate", 401 + user = User.query.get(decoded["user"]) + g.user = user + expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=60) + new_token = jwt.encode({"exp": expires}, config.secret, algorithm="HS256") + resp = make_response("Token refreshed") + resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") + return resp + + @api.route("/auth/logout") class Logout(Resource): @api.response(200, "Success") @@ -105,30 +127,9 @@ def get(self): return g.user.to_dict() -def authentication(): - g.user = None - if "user" not in request.cookies: - return - # if 'user' in - token = request.cookies.get("user") - try: - decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) - except jwt.ExpiredSignatureError: - return "Session time is over", 401 - user = User.query.get(decoded["user"]) - g.user = user - # refreshed_encoded = jwt.decode({"exp": datetime.datetime.now(timezone.utc) - # + datetime.timedelta(minutes=60)}, - # config.secret, algorithms=["HS256"]) - # resp = make_response(user.to_dict()) - # resp.set_cookie( - # "user", refreshed_encoded, secure=True, httponly=True, samesite="lax" - # ) - # resp.status = 200 - def authorization(): - """check whether url is allowed to be reached""" + """it checks whether url is allowed to be reached""" # white listed routes public_routes = [ "/auth", From b2ac60c73f3818b05a941565ad7cd23bae87866d Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 27 Sep 2023 19:04:36 +0000 Subject: [PATCH 173/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= 
=?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index f8f3caee..b001cbe1 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -97,7 +97,9 @@ def authentication(): return "Token has expired, please re-authenticate", 401 user = User.query.get(decoded["user"]) g.user = user - expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=60) + expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=60 + ) new_token = jwt.encode({"exp": expires}, config.secret, algorithm="HS256") resp = make_response("Token refreshed") resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") @@ -127,7 +129,6 @@ def get(self): return g.user.to_dict() - def authorization(): """it checks whether url is allowed to be reached""" # white listed routes From 1ed8105804a34af1c9045950b1735bf1cdd36364 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 12:07:46 -0700 Subject: [PATCH 174/505] feat: added comments to authentication functions --- apis/authentication.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index f8f3caee..78d690bd 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -28,10 +28,12 @@ class AccessDenied(Exception): @api.route("/auth/signup") class SignUpUser(Resource): + @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(signup_model) def post(self): + """signs up the new users and saves data in DB""" data = request.json # TODO data[email doesnt exist then raise error; json validation library if not data["email_address"]: @@ -54,6 +56,8 @@ class Login(Resource): @api.response(400, "Validation Error") # @api.marshal_with(login_model) 
def post(self): + """logs in user and handles few authentication errors. + Also, it sets token for logged user along with expiration date""" data = request.json email_address = data["email_address"] user = User.query.filter_by(email_address=email_address).one_or_none() @@ -109,6 +113,7 @@ class Logout(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") def post(self): + """simply logges out user from the system""" resp = make_response() resp.status = 204 resp.delete_cookie("user") @@ -118,7 +123,6 @@ def post(self): @api.route("/auth/current-users") class CurrentUsers(Resource): """function is used to see all logged users in the system. For now, it is used for testing purposes""" - @api.response(200, "Success") @api.response(400, "Validation Error") def get(self): @@ -127,7 +131,6 @@ def get(self): return g.user.to_dict() - def authorization(): """it checks whether url is allowed to be reached""" # white listed routes From b4aa3601f07beca57f94034f765d11207d4e995e Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 12:14:23 -0700 Subject: [PATCH 175/505] fix: trivial issue in refresh tokens --- apis/authentication.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/authentication.py b/apis/authentication.py index 78d690bd..95f54dda 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -102,7 +102,7 @@ def authentication(): user = User.query.get(decoded["user"]) g.user = user expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=60) - new_token = jwt.encode({"exp": expires}, config.secret, algorithm="HS256") + new_token = jwt.encode({"user": g.user, "exp": expires}, config.secret, algorithm="HS256") resp = make_response("Token refreshed") resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") return resp From d028241a33849f850bfc786e97d33aaf0e69c40e Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 27 Sep 2023 19:15:16 +0000 Subject: 
[PATCH 176/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 95f54dda..c13f53db 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -28,7 +28,6 @@ class AccessDenied(Exception): @api.route("/auth/signup") class SignUpUser(Resource): - @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(signup_model) @@ -101,8 +100,12 @@ def authentication(): return "Token has expired, please re-authenticate", 401 user = User.query.get(decoded["user"]) g.user = user - expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=60) - new_token = jwt.encode({"user": g.user, "exp": expires}, config.secret, algorithm="HS256") + expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=60 + ) + new_token = jwt.encode( + {"user": g.user, "exp": expires}, config.secret, algorithm="HS256" + ) resp = make_response("Token refreshed") resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") return resp @@ -123,6 +126,7 @@ def post(self): @api.route("/auth/current-users") class CurrentUsers(Resource): """function is used to see all logged users in the system. 
For now, it is used for testing purposes""" + @api.response(200, "Success") @api.response(400, "Validation Error") def get(self): From 06f8d008ec15c6fdf830dfeec0035d83a8421e59 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 16:13:19 -0700 Subject: [PATCH 177/505] fix: study GET query --- apis/study.py | 40 ++++++++++++++++------------------------ 1 file changed, 16 insertions(+), 24 deletions(-) diff --git a/apis/study.py b/apis/study.py index 7c50a64a..261d2e23 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,34 +1,17 @@ -from flask import request +from flask import request, g from flask_restx import Namespace, Resource, fields -from model import Study, db +from model import Study, db, User from .authentication import is_granted api = Namespace("Study", description="Study operations", path="/") -owner = api.model( - "Owner", - { - "id": fields.String(required=True), - "affiliations": fields.String(required=True), - "email": fields.String(required=True), - "first_name": fields.String(required=True), - "last_name": fields.String(required=True), - "orcid": fields.String(required=True), - "roles": fields.List(fields.String, required=True), - "permission": fields.String(required=True), - "status": fields.String(required=True), - }, -) -study = api.model( +study_model = api.model( "Study", { - "id": fields.String(required=True), - "title": fields.String(required=True), - "image": fields.String(required=True), - "last_updated": fields.String(required=True), - "owner": fields.Nested(owner, required=True), + "title": fields.String(required=True, default=""), + "image": fields.String(required=True, default=""), }, ) @@ -40,9 +23,13 @@ class Studies(Resource): @api.response(400, "Validation Error") # @api.marshal_with(study) def get(self): - studies = Study.query.all() + """this code ensure each user access and see only allowed studies""" + studies = Study.query.filter(Study.study_contributors.any(User.id == g.user.id)).all() return [s.to_dict() for s in 
studies] + @api.expect(study_model) + @api.response(200, "Success") + @api.response(400, "Validation Error") def post(self): add_study = Study.from_data(request.json) db.session.add(add_study) @@ -60,6 +47,9 @@ def get(self, study_id: int): study1 = Study.query.get(study_id) return study1.to_dict() + @api.expect(study_model) + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 @@ -69,7 +59,7 @@ def put(self, study_id: int): return update_study.to_dict() def delete(self, study_id: int): - if not is_granted("admin", study_id): + if not is_granted("owner", study_id): return "Access denied, you can not delete study", 403 delete_study = Study.query.get(study_id) for d in delete_study.dataset: @@ -81,6 +71,8 @@ def delete(self, study_id: int): db.session.delete(d) for p in delete_study.participants: db.session.delete(p) + for c in delete_study.study_contributors: + db.session.delete(c) db.session.delete(delete_study) db.session.commit() return "", 204 From 6e31cec479c2b8814d8ea95bd0549774cd5bae86 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 17:15:48 -0700 Subject: [PATCH 178/505] fix: login return --- apis/authentication.py | 31 ++++++++++++++++++++----------- model/user.py | 11 +++++------ model/user_details.py | 8 ++++---- 3 files changed, 29 insertions(+), 21 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 95f54dda..e9354405 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -4,7 +4,7 @@ from datetime import timezone import datetime from dateutil.parser import parse -from model import db, User +from model import db, User, UserDetails import jwt import config @@ -13,14 +13,18 @@ signup_model = api.model( "Signup", { - "id": fields.String(required=True), - "email_address": fields.String(required=True), - "email_verified": fields.String(required=True), - "username": 
fields.String(required=True), - "created_at": fields.Integer(required=True), + "email_address": fields.String(required=True, default="sample@gmail.com"), + "password": fields.String(required=True, default=""), }, ) +login_model = api.model( + "Login", + { + "email_address": fields.String(required=True, default=""), + "password": fields.String(required=True, default=""), + }, +) class AccessDenied(Exception): pass @@ -31,7 +35,8 @@ class SignUpUser(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(signup_model) + # @api.marshal_with(signup_model) + @api.expect(signup_model) def post(self): """signs up the new users and saves data in DB""" data = request.json @@ -44,10 +49,11 @@ def post(self): # user = User.query.filter_by(username=data["username"]).one_or_none() # if user: # return "This username is already in use", 409 - user = User.from_data(data) - db.session.add(user) + user_add = User.from_data(data) + # user.user_details.update(data) + db.session.add(user_add) db.session.commit() - return user.to_dict(), 201 + return user_add.to_dict(), 201 @api.route("/auth/login") @@ -55,6 +61,7 @@ class Login(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(login_model) + @api.expect(login_model) def post(self): """logs in user and handles few authentication errors. 
Also, it sets token for logged user along with expiration date""" @@ -100,9 +107,11 @@ def authentication(): # Handle token expiration error here (e.g., re-authenticate the user) return "Token has expired, please re-authenticate", 401 user = User.query.get(decoded["user"]) + # if decoded in user.token_blacklist: + # return "authentication failed", 403 g.user = user expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=60) - new_token = jwt.encode({"user": g.user, "exp": expires}, config.secret, algorithm="HS256") + new_token = jwt.encode({"user": user.id, "exp": expires}, config.secret, algorithm="HS256") resp = make_response("Token refreshed") resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") return resp diff --git a/model/user.py b/model/user.py index 4804f5a4..cedfe09d 100644 --- a/model/user.py +++ b/model/user.py @@ -4,14 +4,14 @@ from datetime import timezone import datetime import app - +import model class User(db.Model): def __init__(self, password, data): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() self.set_password(password, data) - + self.user_details = model.UserDetails(self) __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) @@ -23,16 +23,15 @@ def __init__(self, password, data): study_contributors = db.relationship("StudyContributor", back_populates="user") email_verification = db.relationship("EmailVerification", back_populates="user") token_blacklist = db.relationship("TokenBlacklist", back_populates="user") - user_details = db.relationship("UserDetails", back_populates="user") + user_details = db.relationship("UserDetails", uselist=False, back_populates="user") def to_dict(self): return { "id": self.id, "email_address": self.email_address, "username": self.username, - "hash": self.hash, - "created_at": self.created_at, - "email_verified": self.email_verified, 
+ "first_name": self.user_details.first_name, + "last_name": self.user_details.last_name, } @staticmethod diff --git a/model/user_details.py b/model/user_details.py index b0961eaf..875619cd 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -3,7 +3,7 @@ class UserDetails(db.Model): - def __init__(self): + def __init__(self, user): self.id = str(uuid.uuid4()) self.first_name = "" self.last_name = "" @@ -11,7 +11,7 @@ def __init__(self): self.location = "" self.timezone = "" self.orcid = "" - + self.user = user __tablename__ = "user_details" id = db.Column(db.CHAR(36), primary_key=True) first_name = db.Column(db.String, nullable=True) @@ -36,8 +36,8 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - user = UserDetails() + def from_data(user, data: dict): + user = UserDetails(user) user.update(data) return user From c6d9b5ec2157994c32d6ab87df4aba7be3535b8e Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 28 Sep 2023 00:17:34 +0000 Subject: [PATCH 179/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 13 +++++++++---- apis/study.py | 4 +++- model/user.py | 2 ++ model/user_details.py | 1 + 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index e9354405..a48f35cd 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -14,7 +14,7 @@ "Signup", { "email_address": fields.String(required=True, default="sample@gmail.com"), - "password": fields.String(required=True, default=""), + "password": fields.String(required=True, default=""), }, ) @@ -26,13 +26,13 @@ }, ) + class AccessDenied(Exception): pass @api.route("/auth/signup") class SignUpUser(Resource): - @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(signup_model) @@ -110,8 +110,12 @@ def authentication(): # if 
decoded in user.token_blacklist: # return "authentication failed", 403 g.user = user - expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=60) - new_token = jwt.encode({"user": user.id, "exp": expires}, config.secret, algorithm="HS256") + expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=60 + ) + new_token = jwt.encode( + {"user": user.id, "exp": expires}, config.secret, algorithm="HS256" + ) resp = make_response("Token refreshed") resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") return resp @@ -132,6 +136,7 @@ def post(self): @api.route("/auth/current-users") class CurrentUsers(Resource): """function is used to see all logged users in the system. For now, it is used for testing purposes""" + @api.response(200, "Success") @api.response(400, "Validation Error") def get(self): diff --git a/apis/study.py b/apis/study.py index 261d2e23..92652e25 100644 --- a/apis/study.py +++ b/apis/study.py @@ -24,7 +24,9 @@ class Studies(Resource): # @api.marshal_with(study) def get(self): """this code ensure each user access and see only allowed studies""" - studies = Study.query.filter(Study.study_contributors.any(User.id == g.user.id)).all() + studies = Study.query.filter( + Study.study_contributors.any(User.id == g.user.id) + ).all() return [s.to_dict() for s in studies] @api.expect(study_model) diff --git a/model/user.py b/model/user.py index cedfe09d..5595a3fd 100644 --- a/model/user.py +++ b/model/user.py @@ -6,12 +6,14 @@ import app import model + class User(db.Model): def __init__(self, password, data): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() self.set_password(password, data) self.user_details = model.UserDetails(self) + __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) diff --git a/model/user_details.py b/model/user_details.py index 
875619cd..280fed21 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -12,6 +12,7 @@ def __init__(self, user): self.timezone = "" self.orcid = "" self.user = user + __tablename__ = "user_details" id = db.Column(db.CHAR(36), primary_key=True) first_name = db.Column(db.String, nullable=True) From f4ed10db30b77555335a7839bfc1ab91bc0787b5 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 27 Sep 2023 17:22:21 -0700 Subject: [PATCH 180/505] fix: update signup return --- apis/authentication.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/authentication.py b/apis/authentication.py index e9354405..815fd6ad 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -53,7 +53,7 @@ def post(self): # user.user_details.update(data) db.session.add(user_add) db.session.commit() - return user_add.to_dict(), 201 + return f"Hi, {user_add.email_address}, you have successfully signed up", 201 @api.route("/auth/login") From 76e5124fcef55402bc6b18489cef1bee72ba6a5b Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 27 Sep 2023 19:03:18 -0700 Subject: [PATCH 181/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20cors=20f?= =?UTF-8?q?or=20credentials=20support?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 17 ++++++++++++++++- dev-docker-compose.yaml | 4 ++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/app.py b/app.py index 7673d4da..d1212a97 100644 --- a/app.py +++ b/app.py @@ -45,7 +45,22 @@ def create_app(): model.db.init_app(app) api.init_app(app) bcrypt.init_app(app) - CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) + + # Only allow CORS origin for localhost:3000 + CORS( + app, + resources={ + r"/*": { + "origins": "http://localhost:3000", + } + }, + supports_credentials=True, + ) + + app.config["CORS_ALLOW_CREDENTIALS"] = True + + # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) + # # 
@app.cli.command("create-schema") # def create_schema(): diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 7e0303dc..7caee27a 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -34,6 +34,6 @@ services: interval: 5s timeout: 5s retries: 5 - # volumes: - # - ./postgres-data:/var/lib/postgresql/data + volumes: + - ./postgres-data:/var/lib/postgresql/data # - ./sql/init_timezones.sql:/docker-entrypoint-initdb.d/1-schema.sql From f5ed082e96ce88ce0c7e97eb75cacf58ed074901 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 27 Sep 2023 22:28:52 -0700 Subject: [PATCH 182/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20cors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 ++ app.py | 19 ++++++++++++++----- dev-docker-compose.yaml | 2 +- 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index b7f9d18f..6856a720 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -105,6 +105,7 @@ def authentication(): decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: # Handle token expiration error here (e.g., re-authenticate the user) + # TODO: delete the cookie return "Token has expired, please re-authenticate", 401 user = User.query.get(decoded["user"]) # if decoded in user.token_blacklist: @@ -155,6 +156,7 @@ def authorization(): "/swaggerui", "/swagger.json", ] + print(g.user) for route in public_routes: if request.path.startswith(route): return diff --git a/app.py b/app.py index d1212a97..534b60c6 100644 --- a/app.py +++ b/app.py @@ -1,8 +1,9 @@ """Entry point for the application.""" -from flask import Flask +from flask import Flask, request from flask_cors import CORS from sqlalchemy import MetaData + import model from apis import api from flask_bcrypt import Bcrypt @@ -32,6 +33,8 @@ def create_app(): # print(app.config) + # TODO: add a check for 
secret key + if "DATABASE_URL" in app.config: # if "TESTING" in app_config and app_config["TESTING"]: # pass @@ -50,15 +53,21 @@ def create_app(): CORS( app, resources={ - r"/*": { - "origins": "http://localhost:3000", + "/*": { + "origins": [ + "http://localhost:3000", + ], } }, + allow_headers=[ + "Content-Type", + "Authorization", + "Access-Control-Allow-Origin", + "Access-Control-Allow-Credentials", + ], supports_credentials=True, ) - app.config["CORS_ALLOW_CREDENTIALS"] = True - # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) # diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 7caee27a..ed842bf3 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -21,7 +21,7 @@ services: database: condition: service_healthy database: - image: postgres:9.6 + image: postgres:latest environment: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres From 59e07eead36718f16d24cbf2b2b2b838a05434d3 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 28 Sep 2023 11:14:40 -0700 Subject: [PATCH 183/505] fix: trivial issue in refresh tokens --- apis/authentication.py | 8 +++--- apis/contributor.py | 30 ++++++++++------------ apis/participant.py | 2 +- apis/study.py | 20 +++------------ apis/study_metadata/study_available_ipd.py | 8 ------ 5 files changed, 23 insertions(+), 45 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index b7f9d18f..edd13d1a 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -80,7 +80,7 @@ def post(self): { "user": user.id, "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=60), + + datetime.timedelta(minutes=2), }, config.secret, algorithm="HS256", @@ -107,11 +107,11 @@ def authentication(): # Handle token expiration error here (e.g., re-authenticate the user) return "Token has expired, please re-authenticate", 401 user = User.query.get(decoded["user"]) - # if decoded in user.token_blacklist: + # if decoded in token_blacklist: # return 
"authentication failed", 403 g.user = user expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( - minutes=60 + minutes=2 ) new_token = jwt.encode( {"user": user.id, "exp": expires}, config.secret, algorithm="HS256" @@ -160,7 +160,7 @@ def authorization(): return if g.user: return - raise AccessDenied("Access denied") + return "Access denied", 403 def is_granted(permission: str, study_id: int): diff --git a/apis/contributor.py b/apis/contributor.py index 2d01630a..f8fc98ff 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -7,7 +7,7 @@ contributors_model = api.model( - "Version", + "Contributor", { "user_id": fields.String(required=True), "permission": fields.String(required=True), @@ -23,7 +23,7 @@ class AddContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def get(self, study_id: int): - contributors = StudyContributor.query.all() + contributors = StudyContributor.query.filter_by(study_id=study_id).all() return [c.to_dict() for c in contributors] @@ -32,32 +32,30 @@ class ContributorResource(Resource): @api.doc("contributor update") @api.response(200, "Success") @api.response(400, "Validation Error") - def put(self, study_id: int, user_id): - if not is_granted("admin", study_id): - return "Access denied, you can not modify permissions", 403 + def put(self, study_id: int, user_id: int): + """update contributor permissions""" if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 - data = request.json - contributors = StudyContributor.query.filter_by( - study_id=study_id, user_id=user_id - ) - contributors.permission = data + contributors = StudyContributor.query.filter_by(study_id=study_id, user_id=user_id).first() + if is_granted("admin", study_id) and contributors.permission=="owner": + return "Access denied, you can not modify", 403 + if is_granted("admin", study_id) and user_id != g.user.id and contributors.permission == "admin": + return "Access 
denied, you can not modify other admin permissions", 403 + contributors.update(data) db.session.commit() - return contributors.permission + return 204 @api.doc("contributor delete") @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - if is_granted("viewer", study_id): + if is_granted("owner", study_id): return "Access denied, you can not modify", 403 - contributor = StudyContributor.query.filter_by( - user_id=g.user.id, study_id=study_id - ) + + contributor = StudyContributor.query.filter_by(user_id=user_id, study_id=study_id).first() db.session.delete(contributor) db.session.commit() - print(contributor) return 204 diff --git a/apis/participant.py b/apis/participant.py index e556fa12..8b42de2a 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -7,7 +7,7 @@ api = Namespace("Participant", description="Participant operations", path="/") participant_model = api.model( - "Study", + "Participant", { "id": fields.String(required=True), "first_name": fields.String(required=True), diff --git a/apis/study.py b/apis/study.py index 92652e25..02a81127 100644 --- a/apis/study.py +++ b/apis/study.py @@ -60,6 +60,8 @@ def put(self, study_id: int): db.session.commit() return update_study.to_dict() + @api.response(200, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int): if not is_granted("owner", study_id): return "Access denied, you can not delete study", 403 @@ -80,29 +82,15 @@ def delete(self, study_id: int): return "", 204 -# @api.route("/viewProfile", methods=["GET"]) +# @api.route("/view-profile", methods=["GET"]) # def viewProfile(): -# dic = { -# "username": "admin", -# "email": "aydan.gasimova2@gmail.com", -# "fullname": "Aydan Gasimova", -# "image": f" https://api.dicebear.com/5.x/shapes/svg?seed=$" -# f"{str(random.randint(0,1000))}", -# "institution": "CALMI2", -# "location": "San Diego, CA", -# "password": "admin", -# "timezone": "(GMT-11:00) Midway Island", -# 
} # return jsonify(dic) # -# @study.route("/viewProfile", methods=["POST"]) +# @study.route("/view-profile", methods=["POST"]) # def update_user_profile(): # data = request.json -# # if data is not None: -# data["id"] = 3 -# # return jsonify(data), 201 # diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 04318f60..9cb89ebb 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -35,14 +35,6 @@ def get(self, study_id: int): @api.response(400, "Validation Error") @api.marshal_with(study_available) def post(self, study_id: int): - # parser = reqparse.RequestParser() - # parser.add_argument("username", type=str, required=True) - # parser.add_argument("password", type=str, required=True) - # parser.add_argument("username", type=str, required=True) - # parser.add_argument("password", type=str, required=True) - # parser.add_argument("password", type=str, required=True) - # args = parser.parse_args() - data = request.json study_obj = Study.query.get(study_id) list_of_elements = [] From fa41160dc9b400081431b859650c4cc68ae737a3 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 28 Sep 2023 18:15:23 +0000 Subject: [PATCH 184/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index f8fc98ff..acaf2cff 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -37,10 +37,16 @@ def put(self, study_id: int, user_id: int): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 data = request.json - contributors = StudyContributor.query.filter_by(study_id=study_id, user_id=user_id).first() - if is_granted("admin", study_id) and contributors.permission=="owner": + 
contributors = StudyContributor.query.filter_by( + study_id=study_id, user_id=user_id + ).first() + if is_granted("admin", study_id) and contributors.permission == "owner": return "Access denied, you can not modify", 403 - if is_granted("admin", study_id) and user_id != g.user.id and contributors.permission == "admin": + if ( + is_granted("admin", study_id) + and user_id != g.user.id + and contributors.permission == "admin" + ): return "Access denied, you can not modify other admin permissions", 403 contributors.update(data) db.session.commit() @@ -53,7 +59,9 @@ def delete(self, study_id: int, user_id: int): if is_granted("owner", study_id): return "Access denied, you can not modify", 403 - contributor = StudyContributor.query.filter_by(user_id=user_id, study_id=study_id).first() + contributor = StudyContributor.query.filter_by( + user_id=user_id, study_id=study_id + ).first() db.session.delete(contributor) db.session.commit() return 204 From a8afaa47d462aca4b82f918c0c74581e55a9bc31 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 28 Sep 2023 11:16:36 -0700 Subject: [PATCH 185/505] feat: added cascade to study relations --- model/dataset.py | 2 +- model/invited_study_contributor.py | 2 +- model/participant.py | 2 +- model/study_metadata/study_arm.py | 2 +- model/study_metadata/study_available_ipd.py | 6 +++--- model/study_metadata/study_contact.py | 2 +- model/study_metadata/study_description.py | 2 +- model/study_metadata/study_design.py | 2 +- model/study_metadata/study_eligibility.py | 2 +- model/study_metadata/study_identification.py | 2 +- model/study_metadata/study_intervention.py | 2 +- model/study_metadata/study_ipdsharing.py | 2 +- model/study_metadata/study_link.py | 2 +- model/study_metadata/study_location.py | 2 +- model/study_metadata/study_other.py | 2 +- model/study_metadata/study_overall_official.py | 2 +- model/study_metadata/study_reference.py | 2 +- model/study_metadata/study_sponsors_collaborators.py | 2 +- model/study_metadata/study_status.py 
| 2 +- model/token_blacklist.py | 8 +------- model/user.py | 1 - 21 files changed, 22 insertions(+), 29 deletions(-) diff --git a/model/dataset.py b/model/dataset.py index 947ee087..6e343164 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -19,7 +19,7 @@ def __init__(self, study): updated_on = db.Column(db.BigInteger, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="dataset") dataset_contributors = db.relationship( diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 778565c5..2108c092 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -18,7 +18,7 @@ def __init__(self, study, email_address, permission): permission = db.Column(db.String, nullable=False) invited_on = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), primary_key=True) study = db.relationship("Study", back_populates="invited_contributors") def to_dict(self): diff --git a/model/participant.py b/model/participant.py index 37a1671a..d2dd7954 100644 --- a/model/participant.py +++ b/model/participant.py @@ -20,7 +20,7 @@ def __init__(self, study): created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="participants") dataset_versions = db.relationship( "Version", diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 
50d64289..502a6fa0 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -24,7 +24,7 @@ def __init__(self, study): intervention_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_arm") def to_dict(self): diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 5bec4b7a..c27049ad 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -22,7 +22,7 @@ def __init__(self, study): comment = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_available_ipd") def to_dict(self): @@ -38,13 +38,13 @@ def to_dict(self): @staticmethod def from_data(study, data: dict): - """Creates a new study from a dictionary""" + """Creates a new study metadata from a dictionary""" study_available = StudyAvailableIpd(study) study_available.update(data) return study_available def update(self, data): - """Updates the study from a dictionary""" + """Updates the study metadata from a dictionary""" self.identifier = data["identifier"] self.type = data["type"] self.url = data["url"] diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index de5492f9..9884211c 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -26,7 +26,7 @@ def __init__(self, study, role, central_contact): central_contact = db.Column(db.BOOLEAN, nullable=False) 
created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_contact") def to_dict(self): diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index daf56895..abfcf0c8 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -17,7 +17,7 @@ def __init__(self, study): brief_summary = db.Column(db.String, nullable=False) detailed_description = db.Column(db.String, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_description") def to_dict(self): diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 55ca0480..7bbea550 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -53,7 +53,7 @@ def __init__(self, study): target_duration = db.Column(db.String, nullable=True) number_groups_cohorts = db.Column(db.Integer, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_design") def to_dict(self): diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index d9c4a4f7..19b8cc74 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -39,7 +39,7 @@ def __init__(self, study): study_population = db.Column(db.String, nullable=True) sampling_method = db.Column(db.String, nullable=True) - study_id = 
db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_eligibility") def to_dict(self): diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 7c0dafae..e5545a6a 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -20,7 +20,7 @@ def __init__(self, study, secondary): secondary = db.Column(db.BOOLEAN, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_identification") def to_dict(self): diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 11e18bd9..92e39dec 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -24,7 +24,7 @@ def __init__(self, study): other_name_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_intervention") def to_dict(self): diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 12dda9f8..c5c4dac9 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -27,7 +27,7 @@ def __init__(self, study): ipd_sharing_access_criteria = db.Column(db.String, nullable=False) ipd_sharing_url = db.Column(db.String, nullable=False) - study_id = 
db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_ipdsharing") def to_dict(self): diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 8fe82a08..6634e952 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -19,7 +19,7 @@ def __init__(self, study): title = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_link") def to_dict(self): diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index cc995302..a4ea4115 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -23,7 +23,7 @@ def __init__(self, study): country = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_location") def to_dict(self): diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index ced816ee..1e952412 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -24,7 +24,7 @@ def __init__(self, study): keywords = db.Column(ARRAY(String), nullable=False) size = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), 
nullable=False) study = db.relationship("Study", back_populates="study_other") def to_dict(self): diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index d302b6c8..5a379f23 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -20,7 +20,7 @@ def __init__(self, study): role = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_overall_official") def to_dict(self): diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index a39193bc..7d86392d 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -21,7 +21,7 @@ def __init__(self, study): citation = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_reference") def to_dict(self): diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index c3a6e853..e6548d00 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -27,7 +27,7 @@ def __init__(self, study): lead_sponsor_name = db.Column(db.String, nullable=False) collaborator_name = db.Column(ARRAY(String), nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), 
nullable=False) study = db.relationship("Study", back_populates="study_sponsors_collaborators") def to_dict(self): diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 5ff397af..94e3ab12 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -26,7 +26,7 @@ def __init__(self, study): completion_date = db.Column(db.String, nullable=True) completion_date_type = db.Column(db.String, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), nullable=False) + study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) study = db.relationship("Study", back_populates="study_status") def to_dict(self): diff --git a/model/token_blacklist.py b/model/token_blacklist.py index 0d8a01e1..597c1128 100644 --- a/model/token_blacklist.py +++ b/model/token_blacklist.py @@ -1,8 +1,5 @@ -import uuid -import model -from datetime import timezone + from .db import db -import datetime class TokenBlacklist(db.Model): @@ -10,9 +7,6 @@ class TokenBlacklist(db.Model): jti = db.Column(db.CHAR(36), primary_key=True) exp = db.Column(db.String, nullable=False) - user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) - user = db.relationship("User", back_populates="token_blacklist") - def to_dict(self): return { "jti": self.jti, diff --git a/model/user.py b/model/user.py index 5595a3fd..5b83acdb 100644 --- a/model/user.py +++ b/model/user.py @@ -24,7 +24,6 @@ def __init__(self, password, data): study_contributors = db.relationship("StudyContributor", back_populates="user") email_verification = db.relationship("EmailVerification", back_populates="user") - token_blacklist = db.relationship("TokenBlacklist", back_populates="user") user_details = db.relationship("UserDetails", uselist=False, back_populates="user") def to_dict(self): From 119f21501cd9e9afcdc572ad8bcaaf10bd624669 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 29 Sep 2023 
08:44:33 -0700 Subject: [PATCH 186/505] fix: study POST endpoint adds creator as owner --- apis/authentication.py | 22 ++++++--------------- apis/contributor.py | 1 + apis/study.py | 16 ++++++++++------ app.py | 23 ++++++++++++++++++---- model/study.py | 39 +++++++++++++++++++------------------- model/study_contributor.py | 13 ++++++++++--- 6 files changed, 66 insertions(+), 48 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 7b9cf4c6..c4d2c996 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -80,7 +80,7 @@ def post(self): { "user": user.id, "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=2), + + datetime.timedelta(minutes=70), }, config.secret, algorithm="HS256", @@ -98,28 +98,18 @@ def authentication(): In addition, it handles error handling of expired token and non existed users""" g.user = None if "user" not in request.cookies: - return + return "user not found", 403 # if 'user' in token = request.cookies.get("user") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) + user = User.query.get(decoded["user"]) + # if decoded in token_blacklist: + # return "authentication failed", 403 + g.user = user except jwt.ExpiredSignatureError: # Handle token expiration error here (e.g., re-authenticate the user) - # TODO: delete the cookie return "Token has expired, please re-authenticate", 401 - user = User.query.get(decoded["user"]) - # if decoded in token_blacklist: - # return "authentication failed", 403 - g.user = user - expires = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( - minutes=2 - ) - new_token = jwt.encode( - {"user": user.id, "exp": expires}, config.secret, algorithm="HS256" - ) - resp = make_response("Token refreshed") - resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") - return resp @api.route("/auth/logout") diff --git a/apis/contributor.py b/apis/contributor.py index f8fc98ff..e439c5cd 100644 --- 
a/apis/contributor.py +++ b/apis/contributor.py @@ -36,6 +36,7 @@ def put(self, study_id: int, user_id: int): """update contributor permissions""" if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 + data = request.json contributors = StudyContributor.query.filter_by(study_id=study_id, user_id=user_id).first() if is_granted("admin", study_id) and contributors.permission=="owner": diff --git a/apis/study.py b/apis/study.py index 02a81127..303e9c4d 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,7 +1,7 @@ from flask import request, g from flask_restx import Namespace, Resource, fields -from model import Study, db, User +from model import Study, db, User, StudyContributor from .authentication import is_granted api = Namespace("Study", description="Study operations", path="/") @@ -25,8 +25,7 @@ class Studies(Resource): def get(self): """this code ensure each user access and see only allowed studies""" studies = Study.query.filter( - Study.study_contributors.any(User.id == g.user.id) - ).all() + Study.study_contributors.any(User.id == g.user.id)).all() return [s.to_dict() for s in studies] @api.expect(study_model) @@ -36,7 +35,12 @@ def post(self): add_study = Study.from_data(request.json) db.session.add(add_study) db.session.commit() - return add_study.to_dict() + study_id = add_study.id + study_ = Study.query.get(study_id) + study_contributor = StudyContributor.from_data(study_, g.user, "owner") + db.session.add(study_contributor) + db.session.commit() + return 204 @api.route("/study/") @@ -63,8 +67,8 @@ def put(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int): - if not is_granted("owner", study_id): - return "Access denied, you can not delete study", 403 + # if not is_granted("owner", study_id): + # return "Access denied, you can not delete study", 403 delete_study = Study.query.get(study_id) for d in delete_study.dataset: for version in d.dataset_versions: 
diff --git a/app.py b/app.py index 534b60c6..fd5f88f9 100644 --- a/app.py +++ b/app.py @@ -1,8 +1,11 @@ """Entry point for the application.""" -from flask import Flask, request +from flask import Flask, request, make_response, g +import jwt +import config from flask_cors import CORS from sqlalchemy import MetaData - +from datetime import timezone +import datetime import model from apis import api @@ -85,12 +88,24 @@ def create_app(): @app.before_request def on_before_request(): - authentication() - + try: + authentication() + except: + raise "User not found" authorization() # catch access denied error + # @app.after_request + # def on_after_request(resp): + # if request.path in "/auth/login": + # return resp + # expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=10) + # new_token = jwt.encode( + # {"user": g.user.id, "exp": expired_in}, config.secret, algorithm="HS256") + # resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") + # return resp + @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() diff --git a/model/study.py b/model/study.py index 715e9686..f5625f84 100644 --- a/model/study.py +++ b/model/study.py @@ -25,6 +25,7 @@ def __init__(self): self.study_description = model.StudyDescription(self) self.study_other = model.StudyOther(self) + # self.study_contributors = model.StudyContributor(self) __tablename__ = "study" id = db.Column(db.CHAR(36), primary_key=True) @@ -35,41 +36,41 @@ def __init__(self): dataset = db.relationship("Dataset", back_populates="study") study_contributors = db.relationship( - "StudyContributor", back_populates="study", lazy="dynamic" + "StudyContributor", back_populates="study", lazy="dynamic", cascade="all, delete", ) - participants = db.relationship("Participant", back_populates="study") + participants = db.relationship("Participant", back_populates="study", cascade="all, delete",) invited_contributors = db.relationship( - 
"StudyInvitedContributor", back_populates="study", lazy="dynamic" + "StudyInvitedContributor", back_populates="study", lazy="dynamic", cascade="all, delete", ) - study_arm = db.relationship("StudyArm", back_populates="study") - study_available_ipd = db.relationship("StudyAvailableIpd", back_populates="study") - study_contact = db.relationship("StudyContact", back_populates="study") + study_arm = db.relationship("StudyArm", back_populates="study", cascade="all, delete",) + study_available_ipd = db.relationship("StudyAvailableIpd", back_populates="study", cascade="all, delete",) + study_contact = db.relationship("StudyContact", back_populates="study", cascade="all, delete",) study_description = db.relationship( - "StudyDescription", uselist=False, back_populates="study" + "StudyDescription", uselist=False, back_populates="study", cascade="all, delete", ) - study_design = db.relationship("StudyDesign", uselist=False, back_populates="study") + study_design = db.relationship("StudyDesign", uselist=False, back_populates="study", cascade="all, delete",) study_eligibility = db.relationship( - "StudyEligibility", uselist=False, back_populates="study" + "StudyEligibility", uselist=False, back_populates="study", cascade="all, delete", ) study_identification = db.relationship( - "StudyIdentification", back_populates="study" + "StudyIdentification", back_populates="study", cascade="all, delete", ) - study_intervention = db.relationship("StudyIntervention", back_populates="study") + study_intervention = db.relationship("StudyIntervention", back_populates="study", cascade="all, delete",) study_ipdsharing = db.relationship( - "StudyIpdsharing", uselist=False, back_populates="study" + "StudyIpdsharing", uselist=False, back_populates="study", cascade="all, delete", ) - study_link = db.relationship("StudyLink", back_populates="study") - study_location = db.relationship("StudyLocation", back_populates="study") - study_other = db.relationship("StudyOther", uselist=False, 
back_populates="study") + study_link = db.relationship("StudyLink", back_populates="study", cascade="all, delete",) + study_location = db.relationship("StudyLocation", back_populates="study", cascade="all, delete",) + study_other = db.relationship("StudyOther", uselist=False, back_populates="study", cascade="all, delete",) study_overall_official = db.relationship( - "StudyOverallOfficial", back_populates="study" + "StudyOverallOfficial", back_populates="study", cascade="all, delete", ) - study_reference = db.relationship("StudyReference", back_populates="study") + study_reference = db.relationship("StudyReference", back_populates="study", cascade="all, delete",) study_sponsors_collaborators = db.relationship( - "StudySponsorsCollaborators", uselist=False, back_populates="study" + "StudySponsorsCollaborators", uselist=False, back_populates="study", cascade="all, delete", ) - study_status = db.relationship("StudyStatus", uselist=False, back_populates="study") + study_status = db.relationship("StudyStatus", uselist=False, back_populates="study", cascade="all, delete",) def to_dict(self): """Converts the study to a dictionary""" diff --git a/model/study_contributor.py b/model/study_contributor.py index 3307e6c9..b5edc1cf 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -5,7 +5,6 @@ class StudyContributor(db.Model): def __init__(self, study, user, permission): - self.id = str(uuid.uuid4()) self.study = study self.user = user self.permission = permission @@ -21,9 +20,17 @@ def __init__(self, study, user, permission): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) study = db.relationship("Study", back_populates="study_contributors") + @staticmethod def to_dict(self): return { "permission": self.permission, - "user_id": self.user_id, - "study_id": self.study_id, } + + @staticmethod + def from_data(study, user, permission): + contributor = StudyContributor(study, user, permission) + return contributor + # + # def 
update(self, permission): + # self.permission = permission["permission"] + From 34313d09edcd49c22480bfddb04eaa4ee24829f2 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 29 Sep 2023 22:17:23 +0000 Subject: [PATCH 187/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study.py | 3 +- model/dataset.py | 4 +- model/invited_study_contributor.py | 4 +- model/participant.py | 4 +- model/study.py | 107 ++++++++++++++---- model/study_contributor.py | 2 +- model/study_metadata/study_arm.py | 4 +- model/study_metadata/study_available_ipd.py | 4 +- model/study_metadata/study_contact.py | 4 +- model/study_metadata/study_description.py | 4 +- model/study_metadata/study_design.py | 4 +- model/study_metadata/study_eligibility.py | 4 +- model/study_metadata/study_identification.py | 4 +- model/study_metadata/study_intervention.py | 4 +- model/study_metadata/study_ipdsharing.py | 4 +- model/study_metadata/study_link.py | 4 +- model/study_metadata/study_location.py | 4 +- model/study_metadata/study_other.py | 4 +- .../study_metadata/study_overall_official.py | 4 +- model/study_metadata/study_reference.py | 4 +- .../study_sponsors_collaborators.py | 4 +- model/study_metadata/study_status.py | 4 +- model/token_blacklist.py | 1 - 23 files changed, 148 insertions(+), 41 deletions(-) diff --git a/apis/study.py b/apis/study.py index 303e9c4d..f85a75ca 100644 --- a/apis/study.py +++ b/apis/study.py @@ -25,7 +25,8 @@ class Studies(Resource): def get(self): """this code ensure each user access and see only allowed studies""" studies = Study.query.filter( - Study.study_contributors.any(User.id == g.user.id)).all() + Study.study_contributors.any(User.id == g.user.id) + ).all() return [s.to_dict() for s in studies] @api.expect(study_model) diff --git a/model/dataset.py b/model/dataset.py index 6e343164..e882ed47 100644 --- a/model/dataset.py +++ 
b/model/dataset.py @@ -19,7 +19,9 @@ def __init__(self, study): updated_on = db.Column(db.BigInteger, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="dataset") dataset_contributors = db.relationship( diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 2108c092..f1f6073b 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -18,7 +18,9 @@ def __init__(self, study, email_address, permission): permission = db.Column(db.String, nullable=False) invited_on = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), primary_key=True) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), primary_key=True + ) study = db.relationship("Study", back_populates="invited_contributors") def to_dict(self): diff --git a/model/participant.py b/model/participant.py index d2dd7954..e500b54c 100644 --- a/model/participant.py +++ b/model/participant.py @@ -20,7 +20,9 @@ def __init__(self, study): created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="participants") dataset_versions = db.relationship( "Version", diff --git a/model/study.py b/model/study.py index f5625f84..0b4ac095 100644 --- a/model/study.py +++ b/model/study.py @@ -36,41 +36,110 @@ def __init__(self): dataset = db.relationship("Dataset", back_populates="study") study_contributors 
= db.relationship( - "StudyContributor", back_populates="study", lazy="dynamic", cascade="all, delete", + "StudyContributor", + back_populates="study", + lazy="dynamic", + cascade="all, delete", + ) + participants = db.relationship( + "Participant", + back_populates="study", + cascade="all, delete", ) - participants = db.relationship("Participant", back_populates="study", cascade="all, delete",) invited_contributors = db.relationship( - "StudyInvitedContributor", back_populates="study", lazy="dynamic", cascade="all, delete", + "StudyInvitedContributor", + back_populates="study", + lazy="dynamic", + cascade="all, delete", ) - study_arm = db.relationship("StudyArm", back_populates="study", cascade="all, delete",) - study_available_ipd = db.relationship("StudyAvailableIpd", back_populates="study", cascade="all, delete",) - study_contact = db.relationship("StudyContact", back_populates="study", cascade="all, delete",) + study_arm = db.relationship( + "StudyArm", + back_populates="study", + cascade="all, delete", + ) + study_available_ipd = db.relationship( + "StudyAvailableIpd", + back_populates="study", + cascade="all, delete", + ) + study_contact = db.relationship( + "StudyContact", + back_populates="study", + cascade="all, delete", + ) study_description = db.relationship( - "StudyDescription", uselist=False, back_populates="study", cascade="all, delete", + "StudyDescription", + uselist=False, + back_populates="study", + cascade="all, delete", + ) + study_design = db.relationship( + "StudyDesign", + uselist=False, + back_populates="study", + cascade="all, delete", ) - study_design = db.relationship("StudyDesign", uselist=False, back_populates="study", cascade="all, delete",) study_eligibility = db.relationship( - "StudyEligibility", uselist=False, back_populates="study", cascade="all, delete", + "StudyEligibility", + uselist=False, + back_populates="study", + cascade="all, delete", ) study_identification = db.relationship( - "StudyIdentification", 
back_populates="study", cascade="all, delete", + "StudyIdentification", + back_populates="study", + cascade="all, delete", + ) + study_intervention = db.relationship( + "StudyIntervention", + back_populates="study", + cascade="all, delete", ) - study_intervention = db.relationship("StudyIntervention", back_populates="study", cascade="all, delete",) study_ipdsharing = db.relationship( - "StudyIpdsharing", uselist=False, back_populates="study", cascade="all, delete", + "StudyIpdsharing", + uselist=False, + back_populates="study", + cascade="all, delete", + ) + study_link = db.relationship( + "StudyLink", + back_populates="study", + cascade="all, delete", + ) + study_location = db.relationship( + "StudyLocation", + back_populates="study", + cascade="all, delete", + ) + study_other = db.relationship( + "StudyOther", + uselist=False, + back_populates="study", + cascade="all, delete", ) - study_link = db.relationship("StudyLink", back_populates="study", cascade="all, delete",) - study_location = db.relationship("StudyLocation", back_populates="study", cascade="all, delete",) - study_other = db.relationship("StudyOther", uselist=False, back_populates="study", cascade="all, delete",) study_overall_official = db.relationship( - "StudyOverallOfficial", back_populates="study", cascade="all, delete", + "StudyOverallOfficial", + back_populates="study", + cascade="all, delete", + ) + study_reference = db.relationship( + "StudyReference", + back_populates="study", + cascade="all, delete", ) - study_reference = db.relationship("StudyReference", back_populates="study", cascade="all, delete",) study_sponsors_collaborators = db.relationship( - "StudySponsorsCollaborators", uselist=False, back_populates="study", cascade="all, delete", + "StudySponsorsCollaborators", + uselist=False, + back_populates="study", + cascade="all, delete", + ) + study_status = db.relationship( + "StudyStatus", + uselist=False, + back_populates="study", + cascade="all, delete", ) - study_status = 
db.relationship("StudyStatus", uselist=False, back_populates="study", cascade="all, delete",) def to_dict(self): """Converts the study to a dictionary""" diff --git a/model/study_contributor.py b/model/study_contributor.py index b5edc1cf..c335ea90 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -30,7 +30,7 @@ def to_dict(self): def from_data(study, user, permission): contributor = StudyContributor(study, user, permission) return contributor + # # def update(self, permission): # self.permission = permission["permission"] - diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 502a6fa0..f2c9d1e8 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -24,7 +24,9 @@ def __init__(self, study): intervention_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_arm") def to_dict(self): diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index c27049ad..e4f4d58d 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -22,7 +22,9 @@ def __init__(self, study): comment = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_available_ipd") def to_dict(self): diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 9884211c..e7ea1126 100644 --- 
a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -26,7 +26,9 @@ def __init__(self, study, role, central_contact): central_contact = db.Column(db.BOOLEAN, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_contact") def to_dict(self): diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index abfcf0c8..f0cb9979 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -17,7 +17,9 @@ def __init__(self, study): brief_summary = db.Column(db.String, nullable=False) detailed_description = db.Column(db.String, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_description") def to_dict(self): diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 7bbea550..041e23ae 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -53,7 +53,9 @@ def __init__(self, study): target_duration = db.Column(db.String, nullable=True) number_groups_cohorts = db.Column(db.Integer, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_design") def to_dict(self): diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 
19b8cc74..af7ee5c0 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -39,7 +39,9 @@ def __init__(self, study): study_population = db.Column(db.String, nullable=True) sampling_method = db.Column(db.String, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_eligibility") def to_dict(self): diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index e5545a6a..fed33c4b 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -20,7 +20,9 @@ def __init__(self, study, secondary): secondary = db.Column(db.BOOLEAN, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_identification") def to_dict(self): diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 92e39dec..e6f801b7 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -24,7 +24,9 @@ def __init__(self, study): other_name_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_intervention") def to_dict(self): diff --git a/model/study_metadata/study_ipdsharing.py 
b/model/study_metadata/study_ipdsharing.py index c5c4dac9..7b788420 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -27,7 +27,9 @@ def __init__(self, study): ipd_sharing_access_criteria = db.Column(db.String, nullable=False) ipd_sharing_url = db.Column(db.String, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_ipdsharing") def to_dict(self): diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 6634e952..c62be37d 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -19,7 +19,9 @@ def __init__(self, study): title = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_link") def to_dict(self): diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index a4ea4115..5a3533c2 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -23,7 +23,9 @@ def __init__(self, study): country = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_location") def to_dict(self): diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 
1e952412..95f362e7 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -24,7 +24,9 @@ def __init__(self, study): keywords = db.Column(ARRAY(String), nullable=False) size = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_other") def to_dict(self): diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 5a379f23..4cc78a26 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -20,7 +20,9 @@ def __init__(self, study): role = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_overall_official") def to_dict(self): diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 7d86392d..693276c4 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -21,7 +21,9 @@ def __init__(self, study): citation = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_reference") def to_dict(self): diff --git a/model/study_metadata/study_sponsors_collaborators.py 
b/model/study_metadata/study_sponsors_collaborators.py index e6548d00..e362fd10 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -27,7 +27,9 @@ def __init__(self, study): lead_sponsor_name = db.Column(db.String, nullable=False) collaborator_name = db.Column(ARRAY(String), nullable=False) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_sponsors_collaborators") def to_dict(self): diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 94e3ab12..e1973dac 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -26,7 +26,9 @@ def __init__(self, study): completion_date = db.Column(db.String, nullable=True) completion_date_type = db.Column(db.String, nullable=True) - study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False) + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) study = db.relationship("Study", back_populates="study_status") def to_dict(self): diff --git a/model/token_blacklist.py b/model/token_blacklist.py index 597c1128..12475bd3 100644 --- a/model/token_blacklist.py +++ b/model/token_blacklist.py @@ -1,4 +1,3 @@ - from .db import db From 02ccb0d5ef06662b30310999dc0b7978b9627d5c Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 29 Sep 2023 19:28:28 -0700 Subject: [PATCH 188/505] fix: error handling --- apis/authentication.py | 24 ++++++++++++--------- app.py | 47 ++++++++++++++++++++++++++++-------------- 2 files changed, 45 insertions(+), 26 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index c4d2c996..e6b303db 100644 --- a/apis/authentication.py +++ b/apis/authentication.py 
@@ -31,6 +31,10 @@ class AccessDenied(Exception): pass +class UnauthenticatedException(Exception): + pass + + @api.route("/auth/signup") class SignUpUser(Resource): @api.response(200, "Success") @@ -80,7 +84,7 @@ def post(self): { "user": user.id, "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=70), + + datetime.timedelta(minutes=5), }, config.secret, algorithm="HS256", @@ -98,18 +102,17 @@ def authentication(): In addition, it handles error handling of expired token and non existed users""" g.user = None if "user" not in request.cookies: - return "user not found", 403 - # if 'user' in + return token = request.cookies.get("user") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) - user = User.query.get(decoded["user"]) - # if decoded in token_blacklist: - # return "authentication failed", 403 - g.user = user except jwt.ExpiredSignatureError: - # Handle token expiration error here (e.g., re-authenticate the user) - return "Token has expired, please re-authenticate", 401 + return + user = User.query.get(decoded["user"]) + # if decoded in token_blacklist: + # return "authentication failed", 403 + g.user = user + @api.route("/auth/logout") @@ -139,6 +142,7 @@ def get(self): def authorization(): """it checks whether url is allowed to be reached""" # white listed routes + public_routes = [ "/auth", "/docs", @@ -152,7 +156,7 @@ def authorization(): return if g.user: return - return "Access denied", 403 + raise UnauthenticatedException("Access denied", 403) def is_granted(permission: str, study_id: int): diff --git a/app.py b/app.py index fd5f88f9..75b02f97 100644 --- a/app.py +++ b/app.py @@ -10,7 +10,7 @@ import model from apis import api from flask_bcrypt import Bcrypt -from apis.authentication import authentication, authorization +from apis.authentication import authentication, authorization, UnauthenticatedException # from pyfairdatatools import __version__ @@ -90,21 +90,36 @@ def create_app(): def on_before_request(): 
try: authentication() - except: - raise "User not found" - authorization() - - # catch access denied error - - # @app.after_request - # def on_after_request(resp): - # if request.path in "/auth/login": - # return resp - # expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=10) - # new_token = jwt.encode( - # {"user": g.user.id, "exp": expired_in}, config.secret, algorithm="HS256") - # resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") - # return resp + authorization() + except UnauthenticatedException: + return "You are not allowed to access", 403 + + @app.after_request + def on_after_request(resp): + public_routes = [ + "/auth", + "/docs", + "/echo", + "/swaggerui", + "/swagger.json", + ] + for route in public_routes: + if request.path.startswith(route): + return resp + if "user" not in request.cookies: + return resp + try: + token = request.cookies.get("user") + jwt.decode(token, config.secret, algorithms=["HS256"]) + except jwt.ExpiredSignatureError: + resp.delete_cookie("user") + return resp + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=10) + new_token = jwt.encode({"user": g.user.id, "exp": expired_in}, config.secret, algorithm="HS256") + resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") + return resp + + @app.cli.command("destroy-schema") def destroy_schema(): From 8a1bda2a046e4e1f649caeef9d1f3a1c59ff9e0d Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sat, 30 Sep 2023 02:29:14 +0000 Subject: [PATCH 189/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 1 - app.py | 10 ++++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index e6b303db..750f4eb7 100644 --- a/apis/authentication.py +++ 
b/apis/authentication.py @@ -114,7 +114,6 @@ def authentication(): g.user = user - @api.route("/auth/logout") class Logout(Resource): @api.response(200, "Success") diff --git a/app.py b/app.py index 75b02f97..85555a38 100644 --- a/app.py +++ b/app.py @@ -114,13 +114,15 @@ def on_after_request(resp): except jwt.ExpiredSignatureError: resp.delete_cookie("user") return resp - expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=10) - new_token = jwt.encode({"user": g.user.id, "exp": expired_in}, config.secret, algorithm="HS256") + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=10 + ) + new_token = jwt.encode( + {"user": g.user.id, "exp": expired_in}, config.secret, algorithm="HS256" + ) resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") return resp - - @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() From 1127642a8e16582c173adba78b2d8ac9772119d5 Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 1 Oct 2023 15:26:16 -0700 Subject: [PATCH 190/505] feat: added authorization permissions to contrbtor POST DEL --- apis/authentication.py | 56 +++++++++++++++++------------------ apis/contributor.py | 60 +++++++++++++++++++++++++------------- apis/study.py | 5 ++-- model/study_contributor.py | 6 ++-- 4 files changed, 71 insertions(+), 56 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index e6b303db..4601ea55 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -84,7 +84,7 @@ def post(self): { "user": user.id, "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=5), + + datetime.timedelta(minutes=20), }, config.secret, algorithm="HS256", @@ -114,35 +114,9 @@ def authentication(): g.user = user - -@api.route("/auth/logout") -class Logout(Resource): - @api.response(200, "Success") - @api.response(400, "Validation Error") - def post(self): - """simply logges out user from 
the system""" - resp = make_response() - resp.status = 204 - resp.delete_cookie("user") - return resp - - -@api.route("/auth/current-users") -class CurrentUsers(Resource): - """function is used to see all logged users in the system. For now, it is used for testing purposes""" - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def get(self): - if not g.user: - return None - return g.user.to_dict() - - def authorization(): - """it checks whether url is allowed to be reached""" + """it checks whether url is allowed to be reached to specific routes""" # white listed routes - public_routes = [ "/auth", "/docs", @@ -164,4 +138,30 @@ def is_granted(permission: str, study_id: int): contributor = StudyContributor.query.filter_by( user_id=g.user.id, study_id=study_id ).first() + return contributor.permission == permission + + +@api.route("/auth/logout") +class Logout(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self): + """simply logges out user from the system""" + resp = make_response() + resp.status = 204 + resp.delete_cookie("user") + return resp + + + +@api.route("/auth/current-users") +class CurrentUsers(Resource): + """function is used to see all logged users in the system. 
For now, it is used for testing purposes""" + + @api.response(200, "Success") + @api.response(400, "Validation Error") + def get(self): + if not g.user: + return None + return g.user.to_dict() diff --git a/apis/contributor.py b/apis/contributor.py index 584f2c49..c4417bcf 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -23,7 +23,7 @@ class AddContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def get(self, study_id: int): - contributors = StudyContributor.query.filter_by(study_id=study_id).all() + contributors = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).all() return [c.to_dict() for c in contributors] @@ -34,36 +34,54 @@ class ContributorResource(Resource): @api.response(400, "Validation Error") def put(self, study_id: int, user_id: int): """update contributor permissions""" - if is_granted("viewer", study_id): - return "Access denied, you can not modify", 403 - data = request.json + assigned_permissions = ["owner", "editor", "admin", "viewer"] contributors = StudyContributor.query.filter_by( - study_id=study_id, user_id=user_id - ).first() - if is_granted("admin", study_id) and contributors.permission == "owner": - return "Access denied, you can not modify", 403 - if ( - is_granted("admin", study_id) - and user_id != g.user.id - and contributors.permission == "admin" - ): - return "Access denied, you can not modify other admin permissions", 403 + study_id=study_id, user_id=user_id).first() + permissions = StudyContributor.query.filter_by( + study_id=study_id).all() + permissions_list = [i.permission for i in permissions] + if "owner" in permissions_list and data["permission"]== "owner": + return "This study already contains an owner, only one owner is allowed", 403 + if data["permission"] not in assigned_permissions: + return "Please choose one of allowed permissions", 403 + if is_granted("viewer", study_id): + return "Access denied, viewer can not modify", 403 + if 
is_granted("editor", study_id): + if data["permission"] == "owner" or data["permission"] == "admin": + return "Access denied, editor can not modify admin or other owners", 403 + if is_granted("admin", study_id): + if user_id != g.user.id: + if contributors.permission == "admin" or contributors.permission == "owner": + return "Access denied, you can not modify other admin's or owner's permissions", 403 + elif user_id == g.user.id and data["permission"] == "owner": + return "Access denied, you can not assign an owner", 403 + if is_granted("owner", study_id): + if user_id != g.user.id: + if data["permission"] == "admin": + return "Access denied, you can give an admin access to other contributors", 403 contributors.update(data) db.session.commit() + print(permissions_list) return 204 @api.doc("contributor delete") @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - if is_granted("owner", study_id): - return "Access denied, you can not modify", 403 - - contributor = StudyContributor.query.filter_by( - user_id=user_id, study_id=study_id - ).first() - db.session.delete(contributor) + contributors = StudyContributor.query.filter_by( + study_id=study_id, user_id=user_id).first() + if not is_granted("owner", study_id) and contributors.permission == "admin": + return "Access denied, you can not delete admin contributors", 403 + if is_granted("editor", study_id): + if contributors.permission == "admin" or contributors.permission == "owner": + return "Access denied, editor can not delete admins or owners of the system", 403 + if is_granted("admin", study_id): + if contributors.permission == "admin" or contributors.permission == "owner": + return "Access denied, you can not delete other admin's or owner's permissions", 403 + elif user_id == g.user.id: + return "Access denied, you can not assign an owner", 403 + db.session.delete(contributors) db.session.commit() return 204 diff --git a/apis/study.py 
b/apis/study.py index f85a75ca..aa452b0d 100644 --- a/apis/study.py +++ b/apis/study.py @@ -35,7 +35,6 @@ def get(self): def post(self): add_study = Study.from_data(request.json) db.session.add(add_study) - db.session.commit() study_id = add_study.id study_ = Study.query.get(study_id) study_contributor = StudyContributor.from_data(study_, g.user, "owner") @@ -68,8 +67,8 @@ def put(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int): - # if not is_granted("owner", study_id): - # return "Access denied, you can not delete study", 403 + if not is_granted("owner", study_id): + return "Access denied, you can not delete study", 403 delete_study = Study.query.get(study_id) for d in delete_study.dataset: for version in d.dataset_versions: diff --git a/model/study_contributor.py b/model/study_contributor.py index c335ea90..0e1dfa77 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -20,7 +20,6 @@ def __init__(self, study, user, permission): study_id = db.Column(db.CHAR(36), db.ForeignKey("study.id"), primary_key=True) study = db.relationship("Study", back_populates="study_contributors") - @staticmethod def to_dict(self): return { "permission": self.permission, @@ -31,6 +30,5 @@ def from_data(study, user, permission): contributor = StudyContributor(study, user, permission) return contributor - # - # def update(self, permission): - # self.permission = permission["permission"] + def update(self, permission): + self.permission = permission["permission"] From 2ccc87229fd61c115ba2b471d35239a963a1759c Mon Sep 17 00:00:00 2001 From: aydawka Date: Sun, 1 Oct 2023 21:17:17 -0700 Subject: [PATCH 191/505] feat: token blacklist added --- apis/authentication.py | 20 ++++++++++---------- apis/dataset.py | 10 ++++++++++ app.py | 30 +++++++++++------------------- model/dataset.py | 18 +++++++++--------- model/study_contributor.py | 2 ++ model/user.py | 4 ++-- 6 files changed, 44 insertions(+), 40 
deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 4601ea55..e441b8e9 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -3,10 +3,10 @@ from model import StudyContributor from datetime import timezone import datetime -from dateutil.parser import parse -from model import db, User, UserDetails +from model import db, User, TokenBlacklist import jwt import config +import uuid api = Namespace("Authentication", description="Authentication paths", path="/") @@ -85,13 +85,14 @@ def post(self): "user": user.id, "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=20), + "jti": str(uuid.uuid4()) }, config.secret, algorithm="HS256", ) resp = make_response(user.to_dict()) resp.set_cookie( - "user", encoded_jwt_code, secure=True, httponly=True, samesite="lax" + "token", encoded_jwt_code, secure=True, httponly=True, samesite="lax" ) resp.status = 200 return resp @@ -101,16 +102,17 @@ def authentication(): """it authenticates users to a study, sets access and refresh token. 
In addition, it handles error handling of expired token and non existed users""" g.user = None - if "user" not in request.cookies: + if "token" not in request.cookies: return - token = request.cookies.get("user") + token = request.cookies.get("token") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: return + token_blacklist = TokenBlacklist.query.get(decoded["jti"]) + if token_blacklist: + return user = User.query.get(decoded["user"]) - # if decoded in token_blacklist: - # return "authentication failed", 403 g.user = user @@ -138,7 +140,6 @@ def is_granted(permission: str, study_id: int): contributor = StudyContributor.query.filter_by( user_id=g.user.id, study_id=study_id ).first() - return contributor.permission == permission @@ -150,11 +151,10 @@ def post(self): """simply logges out user from the system""" resp = make_response() resp.status = 204 - resp.delete_cookie("user") + resp.delete_cookie("token") return resp - @api.route("/auth/current-users") class CurrentUsers(Resource): """function is used to see all logged users in the system. 
For now, it is used for testing purposes""" diff --git a/apis/dataset.py b/apis/dataset.py index 774041a2..c81a00ca 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -64,6 +64,14 @@ def post(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") class DatasetResource(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + def get(self, study_id, dataset_id): + data_obj = Dataset.query.get(dataset_id) + return data_obj.to_dict() + + @api.response(201, "Success") + @api.response(400, "Validation Error") def put(self, study_id, dataset_id): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 @@ -73,6 +81,8 @@ def put(self, study_id, dataset_id): db.session.commit() return data_obj.to_dict() + @api.response(201, "Success") + @api.response(400, "Validation Error") def delete(self, study_id, dataset_id): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 diff --git a/app.py b/app.py index 75b02f97..0ae69e9b 100644 --- a/app.py +++ b/app.py @@ -92,35 +92,27 @@ def on_before_request(): authentication() authorization() except UnauthenticatedException: - return "You are not allowed to access", 403 + return "Authentication is required", 401 @app.after_request def on_after_request(resp): - public_routes = [ - "/auth", - "/docs", - "/echo", - "/swaggerui", - "/swagger.json", - ] - for route in public_routes: - if request.path.startswith(route): - return resp - if "user" not in request.cookies: + if "token" not in request.cookies: return resp + token = request.cookies.get("token") try: - token = request.cookies.get("user") - jwt.decode(token, config.secret, algorithms=["HS256"]) + decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: - resp.delete_cookie("user") + resp.delete_cookie("token") + return resp + token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) + if token_blacklist: + 
resp.delete_cookie("token") return resp expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=10) - new_token = jwt.encode({"user": g.user.id, "exp": expired_in}, config.secret, algorithm="HS256") - resp.set_cookie("user", new_token, secure=True, httponly=True, samesite="lax") + new_token = jwt.encode({"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, config.secret, algorithm="HS256") + resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") return resp - - @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() diff --git a/model/dataset.py b/model/dataset.py index e882ed47..8871461d 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -25,25 +25,25 @@ def __init__(self, study): study = db.relationship("Study", back_populates="dataset") dataset_contributors = db.relationship( - "DatasetContributor", back_populates="dataset" + "DatasetContributor", back_populates="dataset", cascade="all, delete", ) dataset_versions = db.relationship( - "Version", back_populates="dataset", lazy="dynamic" + "Version", back_populates="dataset", lazy="dynamic", cascade="all, delete", ) - dataset_access = db.relationship("DatasetAccess", back_populates="dataset") - dataset_consent = db.relationship("DatasetConsent", back_populates="dataset") - dataset_date = db.relationship("DatasetDate", back_populates="dataset") + dataset_access = db.relationship("DatasetAccess", back_populates="dataset", cascade="all, delete",) + dataset_consent = db.relationship("DatasetConsent", back_populates="dataset", cascade="all, delete",) + dataset_date = db.relationship("DatasetDate", back_populates="dataset", cascade="all, delete",) dataset_de_ident_level = db.relationship( - "DatasetDeIdentLevel", back_populates="dataset" + "DatasetDeIdentLevel", back_populates="dataset", cascade="all, delete", ) dataset_description = db.relationship( - "DatasetDescription", back_populates="dataset" + 
"DatasetDescription", back_populates="dataset", cascade="all, delete", ) - dataset_funder = db.relationship("DatasetFunder", back_populates="dataset") + dataset_funder = db.relationship("DatasetFunder", back_populates="dataset", cascade="all, delete",) dataset_alternate_identifier = db.relationship( - "DatasetAlternateIdentifier", back_populates="dataset" + "DatasetAlternateIdentifier", back_populates="dataset", cascade="all, delete", ) dataset_managing_organization = db.relationship( "DatasetManagingOrganization", back_populates="dataset" diff --git a/model/study_contributor.py b/model/study_contributor.py index 0e1dfa77..31a6df12 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -23,6 +23,8 @@ def __init__(self, study, user, permission): def to_dict(self): return { "permission": self.permission, + "study_id": self.study_id, + "user_id": self.user_id } @staticmethod diff --git a/model/user.py b/model/user.py index 5b83acdb..10a04bf8 100644 --- a/model/user.py +++ b/model/user.py @@ -31,8 +31,8 @@ def to_dict(self): "id": self.id, "email_address": self.email_address, "username": self.username, - "first_name": self.user_details.first_name, - "last_name": self.user_details.last_name, + "first_name": self.user_details.first_name if self.user_details else None, + "last_name": self.user_details.last_name if self.user_details else None, } @staticmethod From c0760d98f3bf9be977ffc54530d1d957e3a420ae Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 2 Oct 2023 09:17:57 -0700 Subject: [PATCH 192/505] fix: permissions for contributor PUT endpoint --- apis/authentication.py | 19 ++++++++-- apis/contributor.py | 73 ++++++++++++++++++++----------------- apis/invited_contributor.py | 6 +-- apis/study.py | 14 +++---- 4 files changed, 65 insertions(+), 47 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index e441b8e9..f2be1fe6 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -135,12 +135,23 @@ def 
authorization(): raise UnauthenticatedException("Access denied", 403) -def is_granted(permission: str, study_id: int): +def is_granted(permission: str, study): """filters users and checks whether current permission equal to passed permission""" - contributor = StudyContributor.query.filter_by( - user_id=g.user.id, study_id=study_id + contributor = StudyContributor.query.filter( + StudyContributor.user == g.user, StudyContributor.study == study ).first() - return contributor.permission == permission + if not contributor: + return False + role = { + "owner": ["owner", "view", "delete", "invite", "publish_dataset", "add_dataset", "delete_dataset", "permission"], + "admin": ["admin", "view", "invite", "publish_dataset", "add_dataset", "delete_dataset", "permission"], + "editor": ["editor", "view", "add_dataset", "permission"], + "viewer": ["viewer", "view", ], + } + + return permission in role[contributor.permission] + + @api.route("/auth/logout") diff --git a/apis/contributor.py b/apis/contributor.py index c4417bcf..4fa54518 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,3 +1,5 @@ +from collections import OrderedDict + from flask_restx import Namespace, Resource, fields from flask import request, g from model import StudyContributor, Study, db, User @@ -9,9 +11,8 @@ contributors_model = api.model( "Contributor", { - "user_id": fields.String(required=True), "permission": fields.String(required=True), - "study_id": fields.String(required=True), + }, ) @@ -32,38 +33,47 @@ class ContributorResource(Resource): @api.doc("contributor update") @api.response(200, "Success") @api.response(400, "Validation Error") + @api.expect(contributors_model) def put(self, study_id: int, user_id: int): """update contributor permissions""" + + study = Study.query.get(study_id) + if not is_granted("permission", study): + return "Access denied, you are not authorized to change this permission", 403 + data = request.json - assigned_permissions = ["owner", "editor", "admin", 
"viewer"] - contributors = StudyContributor.query.filter_by( - study_id=study_id, user_id=user_id).first() - permissions = StudyContributor.query.filter_by( - study_id=study_id).all() - permissions_list = [i.permission for i in permissions] - if "owner" in permissions_list and data["permission"]== "owner": - return "This study already contains an owner, only one owner is allowed", 403 - if data["permission"] not in assigned_permissions: - return "Please choose one of allowed permissions", 403 - if is_granted("viewer", study_id): - return "Access denied, viewer can not modify", 403 - if is_granted("editor", study_id): - if data["permission"] == "owner" or data["permission"] == "admin": - return "Access denied, editor can not modify admin or other owners", 403 - if is_granted("admin", study_id): - if user_id != g.user.id: - if contributors.permission == "admin" or contributors.permission == "owner": - return "Access denied, you can not modify other admin's or owner's permissions", 403 - elif user_id == g.user.id and data["permission"] == "owner": - return "Access denied, you can not assign an owner", 403 - if is_granted("owner", study_id): - if user_id != g.user.id: - if data["permission"] == "admin": - return "Access denied, you can give an admin access to other contributors", 403 - contributors.update(data) + user = User.query.get(user_id) + permission = data["permission"] + grantee = StudyContributor.query.filter( + StudyContributor.user == user, StudyContributor.study == study + ).first() + + granter = StudyContributor.query.filter( + StudyContributor.user == g.user, StudyContributor.study == study + ).first() + + # Order should go from the least privileged to the most privileged + grants = OrderedDict() + grants["viewer"] = [] + grants["editor"] = ["viewer"] + grants["admin"] = ["viewer", "editor"] + grants["owner"] = ["editor", "viewer", "admin"] + + can_grant = permission in grants[granter.permission] + if not can_grant: + return f"User cannot grant 
{permission}", 403 + + # Granter can not downgrade anyone of equal or greater permissions other than themselves + # TODO: Owners downgrading themselves + if user != g.user: + grantee_level = list(grants.keys()).index(grantee.permission) # 2 + new_level = list(grants.keys()).index(permission) # 0 + granter_level = list(grants.keys()).index(granter.permission) #2 + if granter_level <= grantee_level and new_level < grantee_level: + return f"User cannot downgrade from {grantee.permission} to {permission}", 403 + grantee.permission = permission db.session.commit() - print(permissions_list) - return 204 + return grantee.to_dict(), 200 @api.doc("contributor delete") @api.response(200, "Success") @@ -85,6 +95,3 @@ def delete(self, study_id: int, user_id: int): db.session.commit() return 204 - -# will need to implement it in all endpoints for which that permission is relevant -# Permissions should be only a database query and conditional statement. Failing permissions should result in a 403 diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 2e37e467..47b96e4e 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -30,11 +30,11 @@ class AddInvitedContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): - if is_granted("viewer", study_id): + if is_granted("invite", study_id): return "Access denied, you can not modify", 403 # try: - if is_granted("editor", study_id): - return "Access denied, you can not invite other contributors", 403 + # contributors = StudyContributor.query.filter_by( + # study_id=study_id, user_id=g.user.id).first() study_obj = Study.query.get(study_id) data = request.json email_address = data["email_address"] diff --git a/apis/study.py b/apis/study.py index aa452b0d..caef4774 100644 --- a/apis/study.py +++ b/apis/study.py @@ -67,21 +67,21 @@ def put(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation 
Error") def delete(self, study_id: int): - if not is_granted("owner", study_id): + study = Study.query.get(study_id) + if not is_granted("delete", study): return "Access denied, you can not delete study", 403 - delete_study = Study.query.get(study_id) - for d in delete_study.dataset: + for d in study.dataset: for version in d.dataset_versions: version.participants.clear() - for d in delete_study.dataset: + for d in study.dataset: for version in d.dataset_versions: db.session.delete(version) db.session.delete(d) - for p in delete_study.participants: + for p in study.participants: db.session.delete(p) - for c in delete_study.study_contributors: + for c in study.study_contributors: db.session.delete(c) - db.session.delete(delete_study) + db.session.delete(study) db.session.commit() return "", 204 From 870c658fd0044ec9ba3ef560b405510d130d0413 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 2 Oct 2023 16:19:41 +0000 Subject: [PATCH 193/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 30 ++++++++++++++++++++----- apis/contributor.py | 35 +++++++++++++++++++---------- app.py | 10 +++++++-- model/dataset.py | 45 ++++++++++++++++++++++++++++++-------- model/study_contributor.py | 2 +- model/user.py | 2 +- 6 files changed, 94 insertions(+), 30 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index f2be1fe6..52885b0f 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -85,7 +85,7 @@ def post(self): "user": user.id, "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=20), - "jti": str(uuid.uuid4()) + "jti": str(uuid.uuid4()), }, config.secret, algorithm="HS256", @@ -143,17 +143,35 @@ def is_granted(permission: str, study): if not contributor: return False role = { - "owner": ["owner", "view", "delete", "invite", "publish_dataset", "add_dataset", 
"delete_dataset", "permission"], - "admin": ["admin", "view", "invite", "publish_dataset", "add_dataset", "delete_dataset", "permission"], + "owner": [ + "owner", + "view", + "delete", + "invite", + "publish_dataset", + "add_dataset", + "delete_dataset", + "permission", + ], + "admin": [ + "admin", + "view", + "invite", + "publish_dataset", + "add_dataset", + "delete_dataset", + "permission", + ], "editor": ["editor", "view", "add_dataset", "permission"], - "viewer": ["viewer", "view", ], + "viewer": [ + "viewer", + "view", + ], } return permission in role[contributor.permission] - - @api.route("/auth/logout") class Logout(Resource): @api.response(200, "Success") diff --git a/apis/contributor.py b/apis/contributor.py index 4fa54518..f25d6a66 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -12,7 +12,6 @@ "Contributor", { "permission": fields.String(required=True), - }, ) @@ -24,7 +23,9 @@ class AddContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def get(self, study_id: int): - contributors = StudyContributor.query.filter_by(user_id=g.user.id, study_id=study_id).all() + contributors = StudyContributor.query.filter_by( + user_id=g.user.id, study_id=study_id + ).all() return [c.to_dict() for c in contributors] @@ -39,7 +40,10 @@ def put(self, study_id: int, user_id: int): study = Study.query.get(study_id) if not is_granted("permission", study): - return "Access denied, you are not authorized to change this permission", 403 + return ( + "Access denied, you are not authorized to change this permission", + 403, + ) data = request.json user = User.query.get(user_id) @@ -66,11 +70,14 @@ def put(self, study_id: int, user_id: int): # Granter can not downgrade anyone of equal or greater permissions other than themselves # TODO: Owners downgrading themselves if user != g.user: - grantee_level = list(grants.keys()).index(grantee.permission) # 2 - new_level = list(grants.keys()).index(permission) # 0 - 
granter_level = list(grants.keys()).index(granter.permission) #2 + grantee_level = list(grants.keys()).index(grantee.permission) # 2 + new_level = list(grants.keys()).index(permission) # 0 + granter_level = list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level and new_level < grantee_level: - return f"User cannot downgrade from {grantee.permission} to {permission}", 403 + return ( + f"User cannot downgrade from {grantee.permission} to {permission}", + 403, + ) grantee.permission = permission db.session.commit() return grantee.to_dict(), 200 @@ -80,18 +87,24 @@ def put(self, study_id: int, user_id: int): @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): contributors = StudyContributor.query.filter_by( - study_id=study_id, user_id=user_id).first() + study_id=study_id, user_id=user_id + ).first() if not is_granted("owner", study_id) and contributors.permission == "admin": return "Access denied, you can not delete admin contributors", 403 if is_granted("editor", study_id): if contributors.permission == "admin" or contributors.permission == "owner": - return "Access denied, editor can not delete admins or owners of the system", 403 + return ( + "Access denied, editor can not delete admins or owners of the system", + 403, + ) if is_granted("admin", study_id): if contributors.permission == "admin" or contributors.permission == "owner": - return "Access denied, you can not delete other admin's or owner's permissions", 403 + return ( + "Access denied, you can not delete other admin's or owner's permissions", + 403, + ) elif user_id == g.user.id: return "Access denied, you can not assign an owner", 403 db.session.delete(contributors) db.session.commit() return 204 - diff --git a/app.py b/app.py index 0ae69e9b..01d2f6b0 100644 --- a/app.py +++ b/app.py @@ -108,8 +108,14 @@ def on_after_request(resp): if token_blacklist: resp.delete_cookie("token") return resp - expired_in = 
datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(minutes=10) - new_token = jwt.encode({"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, config.secret, algorithm="HS256") + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=10 + ) + new_token = jwt.encode( + {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, + config.secret, + algorithm="HS256", + ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") return resp diff --git a/model/dataset.py b/model/dataset.py index 8871461d..91accda4 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -25,25 +25,52 @@ def __init__(self, study): study = db.relationship("Study", back_populates="dataset") dataset_contributors = db.relationship( - "DatasetContributor", back_populates="dataset", cascade="all, delete", + "DatasetContributor", + back_populates="dataset", + cascade="all, delete", ) dataset_versions = db.relationship( - "Version", back_populates="dataset", lazy="dynamic", cascade="all, delete", + "Version", + back_populates="dataset", + lazy="dynamic", + cascade="all, delete", ) - dataset_access = db.relationship("DatasetAccess", back_populates="dataset", cascade="all, delete",) - dataset_consent = db.relationship("DatasetConsent", back_populates="dataset", cascade="all, delete",) - dataset_date = db.relationship("DatasetDate", back_populates="dataset", cascade="all, delete",) + dataset_access = db.relationship( + "DatasetAccess", + back_populates="dataset", + cascade="all, delete", + ) + dataset_consent = db.relationship( + "DatasetConsent", + back_populates="dataset", + cascade="all, delete", + ) + dataset_date = db.relationship( + "DatasetDate", + back_populates="dataset", + cascade="all, delete", + ) dataset_de_ident_level = db.relationship( - "DatasetDeIdentLevel", back_populates="dataset", cascade="all, delete", + "DatasetDeIdentLevel", + back_populates="dataset", + cascade="all, delete", ) 
dataset_description = db.relationship( - "DatasetDescription", back_populates="dataset", cascade="all, delete", + "DatasetDescription", + back_populates="dataset", + cascade="all, delete", ) - dataset_funder = db.relationship("DatasetFunder", back_populates="dataset", cascade="all, delete",) + dataset_funder = db.relationship( + "DatasetFunder", + back_populates="dataset", + cascade="all, delete", + ) dataset_alternate_identifier = db.relationship( - "DatasetAlternateIdentifier", back_populates="dataset", cascade="all, delete", + "DatasetAlternateIdentifier", + back_populates="dataset", + cascade="all, delete", ) dataset_managing_organization = db.relationship( "DatasetManagingOrganization", back_populates="dataset" diff --git a/model/study_contributor.py b/model/study_contributor.py index 31a6df12..9bd0bc73 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -24,7 +24,7 @@ def to_dict(self): return { "permission": self.permission, "study_id": self.study_id, - "user_id": self.user_id + "user_id": self.user_id, } @staticmethod diff --git a/model/user.py b/model/user.py index 10a04bf8..ca793497 100644 --- a/model/user.py +++ b/model/user.py @@ -32,7 +32,7 @@ def to_dict(self): "email_address": self.email_address, "username": self.username, "first_name": self.user_details.first_name if self.user_details else None, - "last_name": self.user_details.last_name if self.user_details else None, + "last_name": self.user_details.last_name if self.user_details else None, } @staticmethod From d0b2ff614953eed3c264139e4bfddc3664a198ef Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 2 Oct 2023 10:57:20 -0700 Subject: [PATCH 194/505] fix: permissions for contributor DELETE endpoint --- apis/contributor.py | 46 +++++++++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 4fa54518..cfbe99c4 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -35,7 +35,7 
@@ class ContributorResource(Resource): @api.response(400, "Validation Error") @api.expect(contributors_model) def put(self, study_id: int, user_id: int): - """update contributor permissions""" + """update contributor based on the assigned permissions""" study = Study.query.get(study_id) if not is_granted("permission", study): @@ -67,8 +67,8 @@ def put(self, study_id: int, user_id: int): # TODO: Owners downgrading themselves if user != g.user: grantee_level = list(grants.keys()).index(grantee.permission) # 2 - new_level = list(grants.keys()).index(permission) # 0 - granter_level = list(grants.keys()).index(granter.permission) #2 + new_level = list(grants.keys()).index(permission) # 0 + granter_level = list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level and new_level < grantee_level: return f"User cannot downgrade from {grantee.permission} to {permission}", 403 grantee.permission = permission @@ -79,19 +79,33 @@ def put(self, study_id: int, user_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - contributors = StudyContributor.query.filter_by( - study_id=study_id, user_id=user_id).first() - if not is_granted("owner", study_id) and contributors.permission == "admin": - return "Access denied, you can not delete admin contributors", 403 - if is_granted("editor", study_id): - if contributors.permission == "admin" or contributors.permission == "owner": - return "Access denied, editor can not delete admins or owners of the system", 403 - if is_granted("admin", study_id): - if contributors.permission == "admin" or contributors.permission == "owner": - return "Access denied, you can not delete other admin's or owner's permissions", 403 - elif user_id == g.user.id: - return "Access denied, you can not assign an owner", 403 - db.session.delete(contributors) + data = request.json + study = Study.query.get(study_id) + if not is_granted("delete_contributors", study): + 
return "Access denied, you are not authorized to change this permission", 403 + user = User.query.get(user_id) + grantee = StudyContributor.query.filter( + StudyContributor.user == user, StudyContributor.study == study + ).first() + + granter = StudyContributor.query.filter( + StudyContributor.user == g.user, StudyContributor.study == study + ).first() + # Order should go from the least privileged to the most privileged + grants = OrderedDict() + grants["viewer"] = [] + grants["editor"] = ["viewer"] + grants["admin"] = ["viewer", "editor"] + grants["owner"] = ["editor", "viewer", "admin"] + + # Granter can not downgrade anyone of equal or greater permissions other than themselves + # TODO: Owners downgrading themselves + if user != g.user: + grantee_level = list(grants.keys()).index(grantee.permission) # 2 + granter_level = list(grants.keys()).index(granter.permission) # 2 + if granter_level <= grantee_level: + return f"You are not authorized to delete {grantee.permission} permission", 403 + db.session.delete(grantee) db.session.commit() return 204 From 814fe1a92be3af40552604f5894f4c232353b163 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 2 Oct 2023 14:23:51 -0700 Subject: [PATCH 195/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20fix=20cors=20opti?= =?UTF-8?q?ons=20request?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 +- app.py | 25 +++++++++++++++++++++++++ dev-docker-compose.yaml | 11 ++++++----- 3 files changed, 32 insertions(+), 6 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 52885b0f..9688a45f 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -126,7 +126,7 @@ def authorization(): "/swaggerui", "/swagger.json", ] - print(g.user) + print("g.user", g.user) for route in public_routes: if request.path.startswith(route): return diff --git a/app.py b/app.py index 01d2f6b0..57cde384 100644 --- a/app.py +++ b/app.py @@ -71,6 +71,14 @@ 
def create_app(): supports_credentials=True, ) + # app.config[ + # "CORS_ALLOW_HEADERS" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # app.config["CORS_SUPPORTS_CREDENTIALS"] = True + # app.config[ + # "CORS_EXPOSE_HEADERS" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) # @@ -88,6 +96,9 @@ def create_app(): @app.before_request def on_before_request(): + if request.method == "OPTIONS": + return + try: authentication() authorization() @@ -96,6 +107,8 @@ def on_before_request(): @app.after_request def on_after_request(resp): + print("after request") + print(request.cookies.get("token")) if "token" not in request.cookies: return resp token = request.cookies.get("token") @@ -117,6 +130,18 @@ def on_after_request(resp): algorithm="HS256", ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") + + # resp.headers["Access-Control-Allow-Origin"] = "http://localhost:3000" + # resp.headers["Access-Control-Allow-Credentials"] = "true" + # resp.headers[ + # "Access-Control-Allow-Headers" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # resp.headers[ + # "Access-Control-Expose-Headers" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + + print(resp.headers) + return resp @app.cli.command("destroy-schema") diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index ed842bf3..b1ecd444 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -3,7 +3,7 @@ services: flask-api: build: context: . 
- dockerfile: Dockerfile + dockerfile: Dockerfile ports: - 5000:5000 # volumes: @@ -26,14 +26,15 @@ services: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: postgres + PGUSER: postgres ports: - 5432:5432 # restart: always healthcheck: - test: "pg_isready --username=postgres && psql --username=postgres --list" - interval: 5s + test: pg_isready + interval: 10s timeout: 5s retries: 5 - volumes: - - ./postgres-data:/var/lib/postgresql/data + # volumes: + # - ./postgres-data:/var/lib/postgresql/data # - ./sql/init_timezones.sql:/docker-entrypoint-initdb.d/1-schema.sql From 5384e0372b096c095f53d451c4d54994fb709f7b Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 2 Oct 2023 14:36:12 -0700 Subject: [PATCH 196/505] fix:refactoring folder --- apis/authentication.py | 43 ++++++++++++++++++++++++++++++++++++++---- apis/contributor.py | 1 - apis/signup_user.py | 39 -------------------------------------- apis/study.py | 6 ++---- app.py | 4 ++-- model/study.py | 2 +- 6 files changed, 44 insertions(+), 51 deletions(-) delete mode 100644 apis/signup_user.py diff --git a/apis/authentication.py b/apis/authentication.py index f2be1fe6..4c25b8ed 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -143,10 +143,45 @@ def is_granted(permission: str, study): if not contributor: return False role = { - "owner": ["owner", "view", "delete", "invite", "publish_dataset", "add_dataset", "delete_dataset", "permission"], - "admin": ["admin", "view", "invite", "publish_dataset", "add_dataset", "delete_dataset", "permission"], - "editor": ["editor", "view", "add_dataset", "permission"], - "viewer": ["viewer", "view", ], + "owner": [ + "owner", + "view", + "delete_study", + "delete_contributor", + "invite_contributor", + "publish_dataset", + "add_dataset", + "delete_dataset", + "permission", + "edit_study", + "update_participant" + "delete_participant" + + ], + "admin": [ + "admin", + "view", + "invite_contributor", + "publish_dataset", + "add_dataset", + 
"delete_dataset", + "permission", + "update_participant", + "delete_participant" + + ], + "editor": [ + "editor", + "view", + "add_dataset", + "permission", + "update_participant", + "delete_participant" + ], + "viewer": [ + "viewer", + "view" + ], } return permission in role[contributor.permission] diff --git a/apis/contributor.py b/apis/contributor.py index cfbe99c4..d4aa7196 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -87,7 +87,6 @@ def delete(self, study_id: int, user_id: int): grantee = StudyContributor.query.filter( StudyContributor.user == user, StudyContributor.study == study ).first() - granter = StudyContributor.query.filter( StudyContributor.user == g.user, StudyContributor.study == study ).first() diff --git a/apis/signup_user.py b/apis/signup_user.py deleted file mode 100644 index 2d466efe..00000000 --- a/apis/signup_user.py +++ /dev/null @@ -1,39 +0,0 @@ -from flask import request -from flask_restx import Namespace, Resource, fields - -from model import db, User - -api = Namespace("Signup", description="Signup user", path="/") - -signup_model = api.model( - "Signup", - { - "id": fields.String(required=True), - "email_address": fields.String(required=True), - "email_verified": fields.String(required=True), - "username": fields.String(required=True), - "created_at": fields.Integer(required=True), - }, -) - - -@api.route("/auth/signup") -class SignUpUser(Resource): - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(signup_model) - def post(self): - data = request.json - # TODO data[email doesnt exist then raise error; json validation library - if not data["email_address"]: - raise "Email is not found" - user = User.query.filter_by(email_address=data["email_address"]).one_or_none() - if user: - return "This email address is already in use", 409 - # user = User.query.filter_by(username=data["username"]).one_or_none() - # if user: - # return "This username is already in use", 409 - user = 
User.from_data(data) - db.session.add(user) - db.session.commit() - return user.to_dict(), 201 diff --git a/apis/study.py b/apis/study.py index caef4774..39d9038b 100644 --- a/apis/study.py +++ b/apis/study.py @@ -57,7 +57,7 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int): - if is_granted("viewer", study_id): + if is_granted("edit_study", study_id): return "Access denied, you can not modify", 403 update_study = Study.query.get(study_id) update_study.update(request.json) @@ -68,7 +68,7 @@ def put(self, study_id: int): @api.response(400, "Validation Error") def delete(self, study_id: int): study = Study.query.get(study_id) - if not is_granted("delete", study): + if not is_granted("delete_study", study): return "Access denied, you can not delete study", 403 for d in study.dataset: for version in d.dataset_versions: @@ -79,8 +79,6 @@ def delete(self, study_id: int): db.session.delete(d) for p in study.participants: db.session.delete(p) - for c in study.study_contributors: - db.session.delete(c) db.session.delete(study) db.session.commit() return "", 204 diff --git a/app.py b/app.py index 0ae69e9b..1e537b7f 100644 --- a/app.py +++ b/app.py @@ -10,7 +10,7 @@ import model from apis import api from flask_bcrypt import Bcrypt -from apis.authentication import authentication, authorization, UnauthenticatedException +from apis.authentication import authentication, authorization, UnauthenticatedException, AccessDenied # from pyfairdatatools import __version__ @@ -91,7 +91,7 @@ def on_before_request(): try: authentication() authorization() - except UnauthenticatedException: + except AccessDenied: return "Authentication is required", 401 @app.after_request diff --git a/model/study.py b/model/study.py index 0b4ac095..5703bfb5 100644 --- a/model/study.py +++ b/model/study.py @@ -34,7 +34,7 @@ def __init__(self): created_at = db.Column(db.BigInteger, nullable=False) updated_on = 
db.Column(db.BigInteger, nullable=False) - dataset = db.relationship("Dataset", back_populates="study") + dataset = db.relationship("Dataset", back_populates="study", cascade="all, delete",) study_contributors = db.relationship( "StudyContributor", back_populates="study", From b9bde7b67f3c3e8a239ee205aabc560739f113ac Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 2 Oct 2023 21:40:36 +0000 Subject: [PATCH 197/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 16 ++++------------ apis/contributor.py | 16 ++++++++++++---- app.py | 7 ++++++- model/study.py | 6 +++++- 4 files changed, 27 insertions(+), 18 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index efade3b4..c5103c00 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -154,9 +154,7 @@ def is_granted(permission: str, study): "delete_dataset", "permission", "edit_study", - "update_participant" - "delete_participant" - + "update_participant" "delete_participant", ], "admin": [ "admin", @@ -167,8 +165,7 @@ def is_granted(permission: str, study): "delete_dataset", "permission", "update_participant", - "delete_participant" - + "delete_participant", ], "editor": [ "editor", @@ -176,19 +173,14 @@ def is_granted(permission: str, study): "add_dataset", "permission", "update_participant", - "delete_participant" + "delete_participant", ], - "viewer": [ - "viewer", - "view" - ], + "viewer": ["viewer", "view"], } return permission in role[contributor.permission] - - @api.route("/auth/logout") class Logout(Resource): @api.response(200, "Success") diff --git a/apis/contributor.py b/apis/contributor.py index caeff00a..1bcfc7e6 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -74,7 +74,10 @@ def put(self, study_id: int, user_id: int): new_level = list(grants.keys()).index(permission) # 0 granter_level = 
list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level and new_level < grantee_level: - return f"User cannot downgrade from {grantee.permission} to {permission}", 403 + return ( + f"User cannot downgrade from {grantee.permission} to {permission}", + 403, + ) grantee.permission = permission db.session.commit() return grantee.to_dict(), 200 @@ -86,7 +89,10 @@ def delete(self, study_id: int, user_id: int): data = request.json study = Study.query.get(study_id) if not is_granted("delete_contributors", study): - return "Access denied, you are not authorized to change this permission", 403 + return ( + "Access denied, you are not authorized to change this permission", + 403, + ) user = User.query.get(user_id) grantee = StudyContributor.query.filter( StudyContributor.user == user, StudyContributor.study == study @@ -107,8 +113,10 @@ def delete(self, study_id: int, user_id: int): grantee_level = list(grants.keys()).index(grantee.permission) # 2 granter_level = list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level: - return f"You are not authorized to delete {grantee.permission} permission", 403 + return ( + f"You are not authorized to delete {grantee.permission} permission", + 403, + ) db.session.delete(grantee) db.session.commit() return 204 - diff --git a/app.py b/app.py index f6e2ba2d..ce5ca42d 100644 --- a/app.py +++ b/app.py @@ -10,7 +10,12 @@ import model from apis import api from flask_bcrypt import Bcrypt -from apis.authentication import authentication, authorization, UnauthenticatedException, AccessDenied +from apis.authentication import ( + authentication, + authorization, + UnauthenticatedException, + AccessDenied, +) # from pyfairdatatools import __version__ diff --git a/model/study.py b/model/study.py index 5703bfb5..feff41c4 100644 --- a/model/study.py +++ b/model/study.py @@ -34,7 +34,11 @@ def __init__(self): created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, 
nullable=False) - dataset = db.relationship("Dataset", back_populates="study", cascade="all, delete",) + dataset = db.relationship( + "Dataset", + back_populates="study", + cascade="all, delete", + ) study_contributors = db.relationship( "StudyContributor", back_populates="study", From 53442a205bc2905079abbcc3d39ec69df1a96410 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 2 Oct 2023 16:31:42 -0700 Subject: [PATCH 198/505] fix: study POST --- apis/study.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study.py b/apis/study.py index 39d9038b..14adf62b 100644 --- a/apis/study.py +++ b/apis/study.py @@ -40,7 +40,7 @@ def post(self): study_contributor = StudyContributor.from_data(study_, g.user, "owner") db.session.add(study_contributor) db.session.commit() - return 204 + return study_.to_dict() @api.route("/study/") From dbd465afcca123965a78353f7ef9b7d21435df2d Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 3 Oct 2023 10:59:51 -0700 Subject: [PATCH 199/505] fix: contributor permisssions --- apis/authentication.py | 47 +++++++++++++++++++++++-------------- apis/contributor.py | 10 ++++---- apis/invited_contributor.py | 13 ++++------ apis/study.py | 10 +++++--- app.py | 4 ++-- 5 files changed, 48 insertions(+), 36 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index efade3b4..c1e8444d 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -27,9 +27,6 @@ ) -class AccessDenied(Exception): - pass - class UnauthenticatedException(Exception): pass @@ -146,49 +143,63 @@ def is_granted(permission: str, study): "owner": [ "owner", "view", - "delete_study", + "permission", "delete_contributor", "invite_contributor", - "publish_dataset", + "add_study", + "update_study", + "delete_study", "add_dataset", + "update_dataset", "delete_dataset", - "permission", - "edit_study", - "update_participant" - "delete_participant" + "publish_version", + "participant", + "study_metadata", + "dataset_metadata", ], "admin": [ 
"admin", "view", + "permission", + "delete_contributor", "invite_contributor", - "publish_dataset", + "add_study", + "update_study", "add_dataset", + "update_dataset", "delete_dataset", - "permission", - "update_participant", - "delete_participant" + "publish_version", + "participant", + "study_metadata", + "dataset_metadata" ], "editor": [ "editor", "view", - "add_dataset", "permission", - "update_participant", - "delete_participant" + "delete_contributor", + "add_study", + "update_study", + "add_dataset", + "update_dataset", + "delete_dataset", + "participant", + "study_metadata", + "dataset_metadata" + ], "viewer": [ "viewer", "view" - ], + ], } return permission in role[contributor.permission] - @api.route("/auth/logout") class Logout(Resource): @api.response(200, "Success") diff --git a/apis/contributor.py b/apis/contributor.py index caeff00a..a1f7e9cc 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -85,8 +85,8 @@ def put(self, study_id: int, user_id: int): def delete(self, study_id: int, user_id: int): data = request.json study = Study.query.get(study_id) - if not is_granted("delete_contributors", study): - return "Access denied, you are not authorized to change this permission", 403 + if not is_granted("delete_contributor", study): + return "Access denied, you are not authorized to delete this contributor", 403 user = User.query.get(user_id) grantee = StudyContributor.query.filter( StudyContributor.user == user, StudyContributor.study == study @@ -107,8 +107,10 @@ def delete(self, study_id: int, user_id: int): grantee_level = list(grants.keys()).index(grantee.permission) # 2 granter_level = list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level: - return f"You are not authorized to delete {grantee.permission} permission", 403 + return f"You are not authorized to delete {grantee.permission}s from study", 403 db.session.delete(grantee) db.session.commit() - return 204 + contributors = 
StudyContributor.query.filter(StudyContributor.study == study + ).all() + return [contributor.to_dict() for contributor in contributors], 200 diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py index 47b96e4e..d420957c 100644 --- a/apis/invited_contributor.py +++ b/apis/invited_contributor.py @@ -1,10 +1,8 @@ from flask_restx import Namespace, Resource, fields from model import ( - StudyInvitedContributor, Study, db, User, - StudyContributor, StudyException, ) from flask import request @@ -16,9 +14,8 @@ contributors_model = api.model( "InvitedContributor", { - "user_id": fields.String(required=True), "permission": fields.String(required=True), - "study_id": fields.String(required=True), + "email_address": fields.String(required=True), }, ) @@ -26,16 +23,14 @@ @api.route("/study//invited-contributor") class AddInvitedContributor(Resource): @api.doc("invited contributor") + @api.expect(contributors_model) @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): - if is_granted("invite", study_id): - return "Access denied, you can not modify", 403 - # try: - # contributors = StudyContributor.query.filter_by( - # study_id=study_id, user_id=g.user.id).first() study_obj = Study.query.get(study_id) + if not is_granted("invite_contributor", study_obj): + return "Access denied, you can not modify", 403 data = request.json email_address = data["email_address"] user = User.query.filter_by(email_address=email_address).first() diff --git a/apis/study.py b/apis/study.py index 14adf62b..6361beb4 100644 --- a/apis/study.py +++ b/apis/study.py @@ -57,9 +57,10 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int): - if is_granted("edit_study", study_id): - return "Access denied, you can not modify", 403 update_study = Study.query.get(study_id) + if not is_granted("update_study", update_study): + return 
"Access denied, you can not modify", 403 + update_study.update(request.json) db.session.commit() return update_study.to_dict() @@ -81,7 +82,10 @@ def delete(self, study_id: int): db.session.delete(p) db.session.delete(study) db.session.commit() - return "", 204 + studies = Study.query.filter( + Study.study_contributors.any(User.id == g.user.id) + ).all() + return [s.to_dict() for s in studies], 201 # @api.route("/view-profile", methods=["GET"]) diff --git a/app.py b/app.py index f6e2ba2d..57cde384 100644 --- a/app.py +++ b/app.py @@ -10,7 +10,7 @@ import model from apis import api from flask_bcrypt import Bcrypt -from apis.authentication import authentication, authorization, UnauthenticatedException, AccessDenied +from apis.authentication import authentication, authorization, UnauthenticatedException # from pyfairdatatools import __version__ @@ -102,7 +102,7 @@ def on_before_request(): try: authentication() authorization() - except AccessDenied: + except UnauthenticatedException: return "Authentication is required", 401 @app.after_request From e4061fcf6e4dbd2f5ba8e349609c4d61c23ecdf6 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 3 Oct 2023 18:02:52 +0000 Subject: [PATCH 200/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 14 +++----------- apis/contributor.py | 9 ++++++--- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index c1e8444d..ad642fcf 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -27,7 +27,6 @@ ) - class UnauthenticatedException(Exception): pass @@ -156,7 +155,6 @@ def is_granted(permission: str, study): "participant", "study_metadata", "dataset_metadata", - ], "admin": [ "admin", @@ -172,8 +170,7 @@ def is_granted(permission: str, study): "publish_version", "participant", "study_metadata", - 
"dataset_metadata" - + "dataset_metadata", ], "editor": [ "editor", @@ -187,19 +184,14 @@ def is_granted(permission: str, study): "delete_dataset", "participant", "study_metadata", - "dataset_metadata" - + "dataset_metadata", ], - "viewer": [ - "viewer", - "view" - ], + "viewer": ["viewer", "view"], } return permission in role[contributor.permission] - @api.route("/auth/logout") class Logout(Resource): @api.response(200, "Success") diff --git a/apis/contributor.py b/apis/contributor.py index d3534122..43f5ca65 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -113,10 +113,13 @@ def delete(self, study_id: int, user_id: int): grantee_level = list(grants.keys()).index(grantee.permission) # 2 granter_level = list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level: - return f"You are not authorized to delete {grantee.permission}s from study", 403 + return ( + f"You are not authorized to delete {grantee.permission}s from study", + 403, + ) db.session.delete(grantee) db.session.commit() - contributors = StudyContributor.query.filter(StudyContributor.study == study + contributors = StudyContributor.query.filter( + StudyContributor.study == study ).all() return [contributor.to_dict() for contributor in contributors], 200 - From b23cd2f8455bcafa20677fc00d6f758e3b76c6ed Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 3 Oct 2023 11:03:51 -0700 Subject: [PATCH 201/505] =?UTF-8?q?=F0=9F=92=A1=20dx:=20add=20comments=20f?= =?UTF-8?q?or=20study=20metadata=20apis=20routes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_arm.py | 18 ++++++++- apis/study_metadata/study_available_ipd.py | 24 ++++++++++-- apis/study_metadata/study_contact.py | 22 ++++++++++- apis/study_metadata/study_description.py | 14 ++++++- apis/study_metadata/study_design.py | 14 ++++++- apis/study_metadata/study_eligibility.py | 11 +++++- apis/study_metadata/study_identification.py | 25 
++++++++++++- apis/study_metadata/study_intervention.py | 20 +++++++++- apis/study_metadata/study_ipdsharing.py | 13 ++++++- apis/study_metadata/study_link.py | 26 +++++++++++-- apis/study_metadata/study_location.py | 21 ++++++++++- apis/study_metadata/study_other.py | 37 ++++++++++++++++++- apis/study_metadata/study_overall_official.py | 24 +++++++++++- apis/study_metadata/study_reference.py | 25 ++++++++++++- .../study_sponsors_collaborators.py | 24 +++++++++++- apis/study_metadata/study_status.py | 14 ++++++- 16 files changed, 302 insertions(+), 30 deletions(-) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index d753206e..116beb86 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,6 +1,7 @@ +"""API routes for study arm metadata""" from flask_restx import Resource, fields -from model import Study, db, StudyArm, Arm from flask import request +from model import Study, db, StudyArm, Arm from apis.study_metadata_namespace import api @@ -27,18 +28,25 @@ @api.route("/study//metadata/arm") class StudyArmResource(Resource): + """Study Arm Metadata""" + @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_arm) def get(self, study_id): + """Get study arm metadata""" study_ = Study.query.get(study_id) + arm = Arm(study_) return arm.to_dict() def post(self, study_id): + """Create study arm metadata""" data = request.json + study_obj = Study.query.get(study_id) + for i in data: if "id" in i and i["id"]: study_arm_ = StudyArm.query.get(i["id"]) @@ -46,15 +54,23 @@ def post(self, study_id): elif "id" not in i or not i["id"]: study_arm_ = StudyArm.from_data(study_obj, i) db.session.add(study_arm_) + db.session.commit() + arms = Arm(study_obj) + return arms.to_dict() # todo delete @api.route("/study//metadata/arm/") class StudyArmUpdate(Resource): + """Study Arm Metadata""" + def delete(self, study_id: int, arm_id: int): + """Delete study arm metadata""" study_arm_ 
= StudyArm.query.get(arm_id) + db.session.delete(study_arm_) db.session.commit() + return 204 diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 9cb89ebb..07058264 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,7 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyAvailableIpd +"""API routes for study available ipd metadata""" +from flask_restx import Resource, fields from flask import request -from flask_restx import reqparse +from model import Study, db, StudyAvailableIpd from apis.study_metadata_namespace import api study_available = api.model( @@ -18,16 +18,22 @@ @api.route("/study//metadata/available-ipd") class StudyAvailableResource(Resource): + """Study Available Metadata""" + @api.doc("available") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_available) def get(self, study_id: int): + """Get study available metadata""" study_ = Study.query.get(study_id) + study_available_ipd_ = study_.study_available_ipd + sorted_study_available_ipd = sorted( study_available_ipd_, key=lambda x: x.created_at ) + return [s.to_dict() for s in sorted_study_available_ipd] @api.doc("update available") @@ -35,9 +41,13 @@ def get(self, study_id: int): @api.response(400, "Validation Error") @api.marshal_with(study_available) def post(self, study_id: int): + """Create study available metadata""" data = request.json + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: if "id" in i and i["id"]: study_available_ipd_ = StudyAvailableIpd.query.get(i["id"]) @@ -47,14 +57,22 @@ def post(self, study_id: int): study_available_ipd_ = StudyAvailableIpd.from_data(study_obj, i) db.session.add(study_available_ipd_) list_of_elements.append(study_available_ipd_.to_dict()) + db.session.commit() + return list_of_elements 
@api.route("/study//metadata/available-ipd/") class StudyLocationUpdate(Resource): + """Study Available Metadata""" + def delete(self, study_id: int, available_ipd_id: int): + """Delete study available metadata""" + study_available_ = StudyAvailableIpd.query.get(available_ipd_id) + db.session.delete(study_available_) db.session.commit() + return 204 diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index d9779449..262f86cd 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyContact +"""API routes for study contact metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db, StudyContact from apis.study_metadata_namespace import api study_contact = api.model( @@ -20,20 +21,30 @@ @api.route("/study//metadata/central-contact") class StudyContactResource(Resource): + """Study Contact Metadata""" + @api.doc("contact") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(study_contact) def get(self, study_id: int): + """Get study contact metadata""" study_ = Study.query.get(study_id) + study_contact_ = study_.study_contact + sorted_study_contact = sorted(study_contact_, key=lambda x: x.created_at) + return [s.to_dict() for s in sorted_study_contact if s.central_contact] def post(self, study_id: int): + """Create study contact metadata""" data = request.json + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: if "id" in i and i["id"]: study_contact_ = StudyContact.query.get(i["id"]) @@ -43,13 +54,20 @@ def post(self, study_id: int): study_contact_ = StudyContact.from_data(study_obj, i, None, True) db.session.add(study_contact_) list_of_elements.append(study_contact_.to_dict()) + db.session.commit() + return list_of_elements @api.route("/study//metadata/central-contact/") class 
StudyContactUpdate(Resource): + """Study Contact Metadata""" + def delete(self, study_id: int, central_contact_id: int): + """Delete study contact metadata""" study_contact_ = StudyContact.query.get(central_contact_id) + db.session.delete(study_contact_) db.session.commit() + return study_contact_.to_dict() diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 9da7c40f..bcfb0f6c 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyDescription +"""API routes for study description metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db from apis.study_metadata_namespace import api @@ -18,17 +19,26 @@ @api.route("/study//metadata/description") class StudyDescriptionResource(Resource): + """Study Description Metadata""" + @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(study_description) def get(self, study_id: int): + """Get study description metadata""" study_ = Study.query.get(study_id) + study_description_ = study_.study_description + return study_description_.to_dict() def put(self, study_id: int): + """Update study description metadata""" study_ = Study.query.get(study_id) + study_.study_description.update(request.json) + db.session.commit() + return study_.study_description.to_dict() diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 3b7f2875..7c2d21a7 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyDesign +"""API routes for study design metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db from 
apis.study_metadata_namespace import api @@ -33,17 +34,26 @@ @api.route("/study//metadata/design") class StudyDesignResource(Resource): + """Study Design Metadata""" + @api.doc("design") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(study_design) def get(self, study_id: int): + """Get study design metadata""" study_ = Study.query.get(study_id) + study_design_ = study_.study_design + return study_design_.to_dict() def put(self, study_id: int): + """Update study design metadata""" study_ = Study.query.get(study_id) + study_.study_design.update(request.json) + db.session.commit() + return study_.study_design.to_dict() diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index d5fbc29b..1a340a5c 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -1,6 +1,7 @@ +"""API routes for study eligibility metadata""" from flask_restx import Resource, fields -from model import Study, db, StudyEligibility from flask import request +from model import Study, db from apis.study_metadata_namespace import api @@ -29,19 +30,27 @@ @api.route("/study//metadata/eligibility") class StudyEligibilityResource(Resource): + """Study Eligibility Metadata""" + @api.doc("eligibility") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_eligibility) def get(self, study_id: int): + """Get study eligibility metadata""" study_ = Study.query.get(study_id) + return study_.study_eligibility.to_dict() def put(self, study_id: int): + """Update study eligibility metadata""" study_ = Study.query.get(study_id) + study_.study_eligibility.update(request.json) + db.session.commit() + return study_.study_eligibility.to_dict() # def post(self, study_id: int): diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 8c286d2e..e8a7f168 100644 --- 
a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyIdentification, Identifiers +"""API routes for study identification metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db, StudyIdentification, Identifiers from apis.study_metadata_namespace import api @@ -21,21 +22,30 @@ @api.route("/study//metadata/identification") class StudyIdentificationResource(Resource): + """Study Identification Metadata""" + @api.doc("identification") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") # @api.marshal_with(study_identification) def get(self, study_id: int): + """Get study identification metadata""" study_ = Study.query.get(study_id) + identifiers = Identifiers(study_) + return identifiers.to_dict() def post(self, study_id: int): + """Create study identification metadata""" data = request.json + study_obj = Study.query.get(study_id) + primary = data["primary"] primary["secondary"] = False + if "id" in primary and primary["id"]: study_identification_ = StudyIdentification.query.get(primary["id"]) study_identification_.update(primary) @@ -44,8 +54,10 @@ def post(self, study_id: int): study_obj, primary, False ) db.session.add(study_identification_) + for i in data["secondary"]: i["secondary"] = True + if "id" in i and i["id"]: study_identification_ = StudyIdentification.query.get(i["id"]) study_identification_.update(i) @@ -54,16 +66,25 @@ def post(self, study_id: int): study_obj, i, True ) db.session.add(study_identification_) + db.session.commit() + identifiers = Identifiers(study_obj) + return identifiers.to_dict() @api.route("/study//metadata/identification/") class StudyIdentificationdUpdate(Resource): + """Study Identification Metadata""" + def delete(self, study_id: int, identification_id: int): + """Delete 
study identification metadata""" study_identification_ = StudyIdentification.query.get(identification_id) + if not study_identification_.secondary: return 400, "primary identifier can not be deleted" + db.session.delete(study_identification_) db.session.commit() + return 204 diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 4112ac77..32d36cfb 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -1,6 +1,7 @@ +"""API routes for study intervention metadata""" from flask_restx import Resource, fields -from model import Study, db, StudyIntervention from flask import request +from model import Study, db, StudyIntervention from apis.study_metadata_namespace import api @@ -21,23 +22,33 @@ @api.route("/study//metadata/intervention") class StudyInterventionResource(Resource): + """Study Intervention Metadata""" + @api.doc("intervention") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_intervention) def get(self, study_id: int): + """Get study intervention metadata""" study_ = Study.query.get(study_id) + study_intervention_ = study_.study_intervention + sorted_study_intervention = sorted( study_intervention_, key=lambda x: x.created_at ) + return [s.to_dict() for s in sorted_study_intervention] def post(self, study_id: int): + """Create study intervention metadata""" data = request.json + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: if "id" in i and i["id"]: study_intervention_ = StudyIntervention.query.get(i["id"]) @@ -47,14 +58,21 @@ def post(self, study_id: int): study_intervention_ = StudyIntervention.from_data(study_obj, i) db.session.add(study_intervention_) list_of_elements.append(study_intervention_.to_dict()) + db.session.commit() return list_of_elements @api.route("/study//metadata/intervention/") class StudyInterventionUpdate(Resource): + 
"""Study Intervention Metadata""" + def delete(self, study_id: int, intervention_id: int): + """Delete study intervention metadata""" study_intervention_ = StudyIntervention.query.get(intervention_id) + db.session.delete(study_intervention_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 579a975f..ec62e23a 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyIpdsharing +"""API routes for study ipdsharing metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db from apis.study_metadata_namespace import api @@ -22,19 +23,27 @@ @api.route("/study//metadata/ipdsharing") class StudyIpdsharingResource(Resource): + """Study Ipdsharing Metadata""" + @api.doc("ipdsharing") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_ipdsharing) def get(self, study_id: int): + """Get study ipdsharing metadata""" study_ = Study.query.get(study_id) + return study_.study_ipdsharing.to_dict() def put(self, study_id: int): + """Create study ipdsharing metadata""" study_ = Study.query.get(study_id) + study_.study_ipdsharing.update(request.json) + db.session.commit() + return study_.study_ipdsharing.to_dict() # def post(self, study_id: int): diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index ddb6f245..bdf679b7 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyLink +"""API routes for study link metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db, StudyLink from apis.study_metadata_namespace 
import api @@ -18,40 +19,59 @@ @api.route("/study//metadata/link") class StudyLinkResource(Resource): + """Study Link Metadata""" + @api.doc("link") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_link) def get(self, study_id: int): + """Get study link metadata""" study_ = Study.query.get(study_id) + study_link_ = study_.study_link + sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at) + return [s.to_dict() for s in sorted_study_link_] def post(self, study_id: int): + """Create study link metadata""" data = request.json + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: if "id" in i and i["id"]: study_link_ = StudyLink.query.get(i["id"]) - if study_link_ == None: + if study_link_ is None: return f"Study link {i['id']} Id is not found", 404 study_link_.update(i) + list_of_elements.append(study_link_.to_dict()) elif "id" not in i or not i["id"]: study_link_ = StudyLink.from_data(study_obj, i) db.session.add(study_link_) + list_of_elements.append(study_link_.to_dict()) db.session.commit() + return list_of_elements @api.route("/study//metadata/link/") class StudyLinkUpdate(Resource): + """Study Link Metadata""" + def delete(self, study_id: int, link_id: int): + """Delete study link metadata""" study_link_ = StudyLink.query.get(link_id) + db.session.delete(study_link_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index b6f6cdb0..2f5def7a 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -1,6 +1,7 @@ +"""API routes for study location metadata""" from flask_restx import Resource, fields -from model import Study, db, StudyLocation from flask import request +from model import Study, db, StudyLocation from apis.study_metadata_namespace import api @@ -22,21 +23,31 @@ @api.route("/study//metadata/location") class 
StudyLocationResource(Resource): + """Study Location Metadata""" + @api.doc("location") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_location) def get(self, study_id: int): + """Get study location metadata""" study_ = Study.query.get(study_id) + study_location_ = study_.study_location + sorted_study_location = sorted(study_location_, key=lambda x: x.created_at) + return [s.to_dict() for s in sorted_study_location] def post(self, study_id: int): + """Create study location metadata""" data = request.json + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: if "id" in i and i["id"]: study_location_ = StudyLocation.query.get(i["id"]) @@ -46,14 +57,22 @@ def post(self, study_id: int): study_location_ = StudyLocation.from_data(study_obj, i) db.session.add(study_location_) list_of_elements.append(study_location_.to_dict()) + db.session.commit() + return list_of_elements @api.route("/study//metadata/location/") class StudyLocationUpdate(Resource): + """Study Location Metadata""" + def delete(self, study_id: int, location_id: int): + """Delete study location metadata""" study_location_ = StudyLocation.query.get(location_id) + db.session.delete(study_location_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index c8deadc7..cab7ce3f 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyOther +"""API routes for study other metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db from apis.study_metadata_namespace import api @@ -19,60 +20,92 @@ @api.route("/study//metadata/other") class StudyOtherResource(Resource): + """Study Other Metadata""" + @api.doc("other") @api.response(200, "Success") 
@api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_other) def get(self, study_id: int): + """Get study other metadata""" study_ = Study.query.get(study_id) + study_other_ = study_.study_other + return study_other_.to_dict() def put(self, study_id: int): + """Update study other metadata""" study_ = Study.query.get(study_id) + study_.study_other.update(request.json) + db.session.commit() + return study_.study_other.to_dict() @api.route("/study//metadata/oversight") class StudyOversightResource(Resource): + """Study Oversight Metadata""" + @api.doc("oversight") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_other) def get(self, study_id: int): + """Get study oversight metadata""" study_ = Study.query.get(study_id) + study_oversight_has_dmc = study_.study_other.oversight_has_dmc + return study_oversight_has_dmc def put(self, study_id: int): + """Update study oversight metadata""" data = request.json + study_ = Study.query.get(study_id) + study_oversight = study_.study_other.oversight_has_dmc = data[ "oversight_has_dmc" ] + study_.touch() + db.session.commit() + return study_oversight +# todo: rename class @api.route("/study//metadata/conditions") class StudyOversightResource(Resource): + """Study Conditions Metadata""" + @api.doc("conditions") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_other) def get(self, study_id: int): + """Get study conditions metadata""" study_ = Study.query.get(study_id) + study_other_conditions = study_.study_other.conditions + return study_other_conditions def put(self, study_id: int): + """Update study conditions metadata""" data = request.json + study_ = Study.query.get(study_id) + study_.study_other.conditions = data + study_.touch() + db.session.commit() + return study_.study_other.conditions diff --git a/apis/study_metadata/study_overall_official.py 
b/apis/study_metadata/study_overall_official.py index 197f3186..3be69584 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyOverallOfficial +"""API routes for study overall official metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db, StudyOverallOfficial from apis.study_metadata_namespace import api @@ -19,24 +20,35 @@ @api.route("/study//metadata/overall-official") class StudyOverallOfficialResource(Resource): + """Study Overall Official Metadata""" + @api.doc("overall_official") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") # @api.marshal_with(study_overall_official) def get(self, study_id: int): + """Get study overall official metadata""" study_ = Study.query.get(study_id) + study_overall_official_ = study_.study_overall_official + # sorted_by_date = sorted([i.created_at for i in study_overall_official_]) + sorted_study_overall = sorted( study_overall_official_, key=lambda x: x.created_at ) + return [i.to_dict() for i in sorted_study_overall] def post(self, study_id: int): + """Create study overall official metadata""" data = request.json + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: if "id" in i and i["id"]: study_overall_official_ = StudyOverallOfficial.query.get(i["id"]) @@ -46,15 +58,23 @@ def post(self, study_id: int): study_overall_official_ = StudyOverallOfficial.from_data(study_obj, i) db.session.add(study_overall_official_) list_of_elements.append(study_overall_official_.to_dict()) + db.session.commit() + return list_of_elements @api.route("/study//metadata/overall-official/") class StudyOverallOfficialUpdate(Resource): + """Study Overall Official Metadata""" + def delete(self, study_id: int, overall_official_id: int): + """Delete study 
overall official metadata""" study_overall_official_ = StudyOverallOfficial.query.get( overall_official_id ) + db.session.delete(study_overall_official_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index e04ce8a5..e70d7a40 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyReference +"""API routes for study reference metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db, StudyReference from apis.study_metadata_namespace import api @@ -20,22 +21,34 @@ @api.route("/study//metadata/reference") class StudyReferenceResource(Resource): + """Study Reference Metadata""" + @api.doc("reference") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_reference) def get(self, study_id: int): + """Get study reference metadata""" study_ = Study.query.get(study_id) + study_reference_ = study_.study_reference + + # todo: remove print print(study_.study_reference) + sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at) + return [s.to_dict() for s in sorted_study_reference] def post(self, study_id: int): + """Create study reference metadata""" data = request.json + study_obj = Study.query.get(study_id) + list_of_elements = [] + for i in data: if "id" in i and i["id"]: study_reference_ = StudyReference.query.get(i["id"]) @@ -45,13 +58,21 @@ def post(self, study_id: int): study_reference_ = StudyReference.from_data(study_obj, i) db.session.add(study_reference_) list_of_elements.append(study_reference_.to_dict()) + db.session.commit() + return list_of_elements @api.route("/study//metadata/reference/") class StudyReferenceUpdate(Resource): + """Study Reference Metadata""" + def delete(self, study_id: 
int, reference_id: int): + """Delete study reference metadata""" study_reference_ = StudyReference.query.get(reference_id) + db.session.delete(study_reference_) + db.session.commit() + return 204 diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 7a0a030e..f08f8629 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudySponsorsCollaborators +"""API routes for study sponsors and collaborators metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db from apis.study_metadata_namespace import api @@ -29,39 +30,58 @@ @api.route("/study//metadata/sponsors") class StudySponsorsResource(Resource): + """Study Sponsors Metadata""" + @api.doc("sponsors") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(study_sponsors) def get(self, study_id: int): + """Get study sponsors metadata""" study_ = Study.query.get(study_id) + study_sponsors_collaborators_ = study_.study_sponsors_collaborators + return study_sponsors_collaborators_.to_dict() def put(self, study_id: int): + """Update study sponsors metadata""" study_ = Study.query.get(study_id) + study_.study_sponsors_collaborators.update(request.json) + db.session.commit() + return study_.study_sponsors_collaborators.to_dict() @api.route("/study//metadata/collaborators") class StudyCollaboratorsResource(Resource): + """Study Collaborators Metadata""" + @api.doc("collaborators") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_collaborators) def get(self, study_id: int): + """Get study collaborators metadata""" study_ = Study.query.get(study_id) + study_collaborators_ = study_.study_sponsors_collaborators.collaborator_name + return study_collaborators_ def 
put(self, study_id: int): + """Update study collaborators metadata""" data = request.json + study_ = Study.query.get(study_id) + study_.study_sponsors_collaborators.collaborator_name = data + study_.touch() db.session.commit() + return study_.study_sponsors_collaborators.collaborator_name # @api.route("/study//metadata/collaborators/") diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 32c8cd48..37aa6de8 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -1,6 +1,7 @@ -from flask_restx import Namespace, Resource, fields -from model import Study, db, StudyStatus +"""API routes for study status metadata""" +from flask_restx import Resource, fields from flask import request +from model import Study, db from apis.study_metadata_namespace import api @@ -22,20 +23,29 @@ @api.route("/study//metadata/status") class StudyStatusResource(Resource): + """Study Status Metadata""" + @api.doc("status") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") @api.marshal_with(study_status) def get(self, study_id: int): + """Get study status metadata""" study_ = Study.query.get(study_id) + study_status_ = study_.study_status + return study_status_.to_dict() def put(self, study_id: int): + """Update study status metadata""" study = Study.query.get(study_id) + study.study_status.update(request.json) + db.session.commit() + return study.study_status.to_dict() # @api.route("/study//metadata/status/") From 89ca9cd92d4f59a8d337bdea33912c644af37bcd Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 3 Oct 2023 11:09:42 -0700 Subject: [PATCH 202/505] =?UTF-8?q?=F0=9F=93=9D=20dx:=20swagger=20expansio?= =?UTF-8?q?n=20set=20to=20none?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 57cde384..23455649 100644 
--- a/app.py +++ b/app.py @@ -22,7 +22,7 @@ def create_app(): # create and configure the app app = Flask(__name__) # `full` if you want to see all the details - app.config["SWAGGER_UI_DOC_EXPANSION"] = "list" + app.config["SWAGGER_UI_DOC_EXPANSION"] = "none" app.config["RESTX_MASK_SWAGGER"] = False # Initialize config app.config.from_pyfile("config.py") From 197c35ef5e7bb8153c3d3431f495561a8830fa4d Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 3 Oct 2023 12:28:48 -0700 Subject: [PATCH 203/505] fix: contributors GET return --- apis/contributor.py | 2 +- model/study_contributor.py | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index d3534122..5723291d 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -16,7 +16,7 @@ ) -@api.route("/study//contributor") +@api.route("/study//contributors") class AddContributor(Resource): @api.doc("contributor list") @api.response(200, "Success") diff --git a/model/study_contributor.py b/model/study_contributor.py index 9bd0bc73..7fad5bdc 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -22,9 +22,12 @@ def __init__(self, study, user, permission): def to_dict(self): return { - "permission": self.permission, - "study_id": self.study_id, "user_id": self.user_id, + "name": self.user.user_details.first_name if self.user.user_details else None, + "email_address": self.user.email_address, + "orcid": self.user.user_details.orcid if self.user.user_details else None, + "role": self.permission, + "status": None } @staticmethod From 5ea403b66ba69372b53480dd4a1fdb8a19ad80fb Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 3 Oct 2023 19:30:27 +0000 Subject: [PATCH 204/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/study_contributor.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 
deletions(-) diff --git a/model/study_contributor.py b/model/study_contributor.py index 7fad5bdc..3bbd866f 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -23,11 +23,13 @@ def __init__(self, study, user, permission): def to_dict(self): return { "user_id": self.user_id, - "name": self.user.user_details.first_name if self.user.user_details else None, + "name": self.user.user_details.first_name + if self.user.user_details + else None, "email_address": self.user.email_address, "orcid": self.user.user_details.orcid if self.user.user_details else None, "role": self.permission, - "status": None + "status": None, } @staticmethod From 486893d633486882fff1f2ae493502fe1dc86fd1 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 3 Oct 2023 14:46:54 -0700 Subject: [PATCH 205/505] fix: logout error fixed --- apis/authentication.py | 6 +++--- apis/study.py | 2 +- apis/study_metadata/study_identification.py | 3 +-- app.py | 5 ++++- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index ad642fcf..41c824fd 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -175,7 +175,6 @@ def is_granted(permission: str, study): "editor": [ "editor", "view", - "permission", "delete_contributor", "add_study", "update_study", @@ -200,10 +199,11 @@ def post(self): """simply logges out user from the system""" resp = make_response() resp.status = 204 - resp.delete_cookie("token") + resp.set_cookie( + "token", "", secure=True, httponly=True, samesite="lax", expires=datetime.datetime.now(timezone.utc) + ) return resp - @api.route("/auth/current-users") class CurrentUsers(Resource): """function is used to see all logged users in the system. 
For now, it is used for testing purposes""" diff --git a/apis/study.py b/apis/study.py index 6361beb4..c040c09b 100644 --- a/apis/study.py +++ b/apis/study.py @@ -21,7 +21,7 @@ class Studies(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(study) + # @api.marshal_with(study_model) def get(self): """this code ensure each user access and see only allowed studies""" studies = Study.query.filter( diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index e8a7f168..ebee644f 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -32,11 +32,10 @@ class StudyIdentificationResource(Resource): def get(self, study_id: int): """Get study identification metadata""" study_ = Study.query.get(study_id) - identifiers = Identifiers(study_) - return identifiers.to_dict() + def post(self, study_id: int): """Create study identification metadata""" data = request.json diff --git a/app.py b/app.py index 23455649..4e9ac790 100644 --- a/app.py +++ b/app.py @@ -115,7 +115,10 @@ def on_after_request(resp): try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: - resp.delete_cookie("token") + resp.set_cookie( + "token", "", secure=True, httponly=True, samesite="lax", + expires=datetime.datetime.now(timezone.utc) + ) return resp token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) if token_blacklist: From 5979ac7abd4fbb29c9f53f1547f37b40f8506921 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 3 Oct 2023 21:47:43 +0000 Subject: [PATCH 206/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 8 +++++++- apis/study_metadata/study_identification.py | 1 - app.py | 8 ++++++-- 3 files changed, 13 
insertions(+), 4 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 41c824fd..d093141a 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -200,10 +200,16 @@ def post(self): resp = make_response() resp.status = 204 resp.set_cookie( - "token", "", secure=True, httponly=True, samesite="lax", expires=datetime.datetime.now(timezone.utc) + "token", + "", + secure=True, + httponly=True, + samesite="lax", + expires=datetime.datetime.now(timezone.utc), ) return resp + @api.route("/auth/current-users") class CurrentUsers(Resource): """function is used to see all logged users in the system. For now, it is used for testing purposes""" diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index ebee644f..cfe34d1f 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -35,7 +35,6 @@ def get(self, study_id: int): identifiers = Identifiers(study_) return identifiers.to_dict() - def post(self, study_id: int): """Create study identification metadata""" data = request.json diff --git a/app.py b/app.py index 4e9ac790..2fbc531e 100644 --- a/app.py +++ b/app.py @@ -116,8 +116,12 @@ def on_after_request(resp): decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: resp.set_cookie( - "token", "", secure=True, httponly=True, samesite="lax", - expires=datetime.datetime.now(timezone.utc) + "token", + "", + secure=True, + httponly=True, + samesite="lax", + expires=datetime.datetime.now(timezone.utc), ) return resp token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) From 42fc7f0fc1a47edd011a0d4d311bcf911ff7243b Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 3 Oct 2023 15:39:43 -0700 Subject: [PATCH 207/505] fix: contributors GET return --- apis/authentication.py | 2 +- apis/dataset.py | 43 ++++++++++++++++-------------------------- apis/study.py | 2 ++ app.py | 7 +++++++ 4 files changed, 
26 insertions(+), 28 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 41c824fd..79ba73b8 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -198,10 +198,10 @@ class Logout(Resource): def post(self): """simply logges out user from the system""" resp = make_response() - resp.status = 204 resp.set_cookie( "token", "", secure=True, httponly=True, samesite="lax", expires=datetime.datetime.now(timezone.utc) ) + resp.status = 204 return resp @api.route("/auth/current-users") diff --git a/apis/dataset.py b/apis/dataset.py index c81a00ca..24c3ec56 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -38,7 +38,6 @@ class DatasetList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset) - # @api.expect(body=dataset) def get(self, study_id): study = Study.query.get(study_id) datasets = Dataset.query.filter_by(study=study) @@ -47,11 +46,11 @@ def get(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("update dataset") - # @api.marshal_with(dataset) + @api.expect(dataset) def post(self, study_id): - if is_granted("viewer", study_id): - return "Access denied, you can not modify", 403 study = Study.query.get(study_id) + if not is_granted("add_dataset", study): + return "Access denied, you can not modify", 403 # todo if study.participant id== different study Throw error dataset_ = Dataset.from_data(study, request.json) db.session.add(dataset_) @@ -73,7 +72,8 @@ def get(self, study_id, dataset_id): @api.response(201, "Success") @api.response(400, "Validation Error") def put(self, study_id, dataset_id): - if is_granted("viewer", study_id): + study = Study.query.get(study_id) + if not is_granted("update_dataset", study): return "Access denied, you can not modify", 403 data = request.json data_obj = Dataset.query.get(dataset_id) @@ -84,30 +84,16 @@ def put(self, study_id, dataset_id): @api.response(201, "Success") @api.response(400, 
"Validation Error") def delete(self, study_id, dataset_id): - if is_granted("viewer", study_id): + study = Study.query.get(study_id) + if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 data_obj = Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: db.session.delete(version) db.session.delete(data_obj) db.session.commit() - return "", 204 - # - # - # delete_study = Study.query.get(study_id) - # for d in delete_study.dataset: - # for version in d.dataset_versions: - # version.participants.clear() - # for d in delete_study.dataset: - # for version in d .dataset_versions: - # db.session.delete(version) - # db.session.delete(d) - # for p in delete_study.participants: - # db.session.delete(p) - # db.session.delete(delete_study) - # db.session.commit() - # return "", 204 - # + dataset_ =study.dataset + return [d.to_dict() for d in dataset_], 201 # def delete(self, study_id, dataset_id, version_id): # data_obj = Dataset.query.get(dataset_id) @@ -130,15 +116,17 @@ def get(self, study_id, dataset_id, version_id): return dataset_version.to_dict() def put(self, study_id, dataset_id, version_id): - if is_granted("viewer", study_id): + study = Study.query.get(study_id) + if not is_granted("publish_dataset", study): return "Access denied, you can not modify", 403 data_version_obj = Version.query.get(version_id) data_version_obj.update(request.json) db.session.commit() - return jsonify(data_version_obj.to_dict()) + return jsonify(data_version_obj.to_dict()), 201 def delete(self, study_id, dataset_id, version_id): - if is_granted("viewer", study_id): + study = Study.query.get(study_id) + if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 data_obj = Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: @@ -154,7 +142,8 @@ def delete(self, study_id, dataset_id, version_id): @api.response(400, "Validation Error") class VersionList(Resource): def post(self, study_id: 
int, dataset_id: int): - if is_granted("viewer", study_id): + study = Study.query.get(study_id) + if not is_granted("publish_version", study): return "Access denied, you can not modify", 403 data = request.json data["participants"] = [Participant.query.get(i) for i in data["participants"]] diff --git a/apis/study.py b/apis/study.py index c040c09b..58ef2bf1 100644 --- a/apis/study.py +++ b/apis/study.py @@ -27,6 +27,8 @@ def get(self): studies = Study.query.filter( Study.study_contributors.any(User.id == g.user.id) ).all() + # studies = Study.query.filter(User.id == g.user.id).all() + return [s.to_dict() for s in studies] @api.expect(study_model) diff --git a/app.py b/app.py index 4e9ac790..2db53012 100644 --- a/app.py +++ b/app.py @@ -107,6 +107,13 @@ def on_before_request(): @app.after_request def on_after_request(resp): + public_routes = [ + "/auth", + ] + print("g.user", g.user) + for route in public_routes: + if request.path.startswith(route): + return resp print("after request") print(request.cookies.get("token")) if "token" not in request.cookies: From 0a98479b3fed775d2ab56f4ff7a616cb0b110945 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 3 Oct 2023 16:11:21 -0700 Subject: [PATCH 208/505] fix: identification GET/POST ENDPOINTS --- apis/study_metadata/study_identification.py | 5 ++++- app.py | 5 ++++- model/study_metadata/identifiers.py | 9 ++++----- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index ebee644f..bfbc82a8 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -35,7 +35,10 @@ def get(self, study_id: int): identifiers = Identifiers(study_) return identifiers.to_dict() - + @api.doc("identification add") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.expect(study_identification) def post(self, study_id: int): """Create study identification metadata""" 
data = request.json diff --git a/app.py b/app.py index 2db53012..d095a20c 100644 --- a/app.py +++ b/app.py @@ -109,8 +109,11 @@ def on_before_request(): def on_after_request(resp): public_routes = [ "/auth", + "/docs", + "/echo", + "/swaggerui", + "/swagger.json", ] - print("g.user", g.user) for route in public_routes: if request.path.startswith(route): return resp diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 746afd50..7e019151 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -13,11 +13,10 @@ def to_dict(self): key=lambda identifier: identifier.created_at, ) return { - "primary": [ - identifier - for identifier in sorted_study_identifications - if not identifier.secondary - ][0].to_dict(), + "primary": + [identifier for identifier in sorted_study_identifications + if not identifier.secondary][0].to_dict() if len([identifier for identifier in + sorted_study_identifications if not identifier.secondary]) != 0 else [], "secondary": [ identifier.to_dict() for identifier in sorted_study_identifications From 719cc469210dea11b300e5543b558990f4496d27 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 3 Oct 2023 17:50:02 -0700 Subject: [PATCH 209/505] fix: study return only associated users --- apis/contributor.py | 29 ++++++++++++++++++++++++++++- apis/study.py | 17 +++++++++++++---- app.py | 9 +++++---- model/study.py | 7 +++++++ 4 files changed, 53 insertions(+), 9 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 4513b408..bb4a1b0f 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -2,7 +2,13 @@ from flask_restx import Namespace, Resource, fields from flask import request, g -from model import StudyContributor, Study, db, User +from model import ( + Study, + db, + User, + StudyException, + StudyContributor +) from .authentication import is_granted api = Namespace("Contributor", description="Contributors", path="/") @@ -28,6 +34,27 @@ def 
get(self, study_id: int): ).all() return [c.to_dict() for c in contributors] + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(contributors_model) + def post(self, study_id: int): + study_obj = Study.query.get(study_id) + if not is_granted("invite_contributor", study_obj): + return "Access denied, you can not modify", 403 + data = request.json + email_address = data["email_address"] + user = User.query.filter_by(email_address=email_address).first() + permission = data["permission"] + contributor_ = None + try: + if user: + contributor_ = study_obj.add_user_to_study(user, permission) + else: + contributor_ = study_obj.invite_user_to_study(email_address, permission) + except StudyException as ex: + return ex.args[0], 409 + db.session.commit() + return contributor_.to_dict(), 201 @api.route("/study//contributor/") class ContributorResource(Resource): diff --git a/apis/study.py b/apis/study.py index 58ef2bf1..b9cf72d9 100644 --- a/apis/study.py +++ b/apis/study.py @@ -24,11 +24,20 @@ class Studies(Resource): # @api.marshal_with(study_model) def get(self): """this code ensure each user access and see only allowed studies""" - studies = Study.query.filter( - Study.study_contributors.any(User.id == g.user.id) - ).all() + # studies = Study.query.filter( + # Study.study_contributors.any(User.id == g.user.id) + # ).all() # studies = Study.query.filter(User.id == g.user.id).all() - + study_contributors = ( + StudyContributor.query + .filter(StudyContributor.user_id == g.user.id) # Filter contributors where user_id matches the user's id + .all() + ) + study_ids = [contributor.study_id for contributor in study_contributors] + + studies = ( + Study.query.filter(Study.id.in_(study_ids)).all() + ) return [s.to_dict() for s in studies] @api.expect(study_model) diff --git a/app.py b/app.py index d095a20c..e5330a16 100644 --- a/app.py +++ b/app.py @@ -109,10 +109,11 @@ def on_before_request(): def on_after_request(resp): public_routes = [ 
"/auth", - "/docs", - "/echo", - "/swaggerui", - "/swagger.json", + "/docs", + "/echo", + "/swaggerui", + "/swagger.json", + "/ favicon.ico" ] for route in public_routes: if request.path.startswith(route): diff --git a/model/study.py b/model/study.py index feff41c4..4ed5c1c7 100644 --- a/model/study.py +++ b/model/study.py @@ -4,6 +4,7 @@ import model from .db import db import datetime +from flask import g class StudyException(Exception): @@ -146,6 +147,9 @@ def __init__(self): ) def to_dict(self): + filtered_contributors = [c.to_dict() for c in self.study_contributors] + # contributors = model.StudyContributor.query.get(self.study_contributors.id).first() + """Converts the study to a dictionary""" return { "id": self.id, @@ -158,8 +162,11 @@ def to_dict(self): "description": self.study_description.brief_summary if self.study_description else None, + # "role": [i.to_dict()["role"] for i in self.study_contributors], + "owner_id": filtered_contributors } + @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" From c246f9096c609c614b59373be6784d706101c7ae Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 4 Oct 2023 00:51:49 +0000 Subject: [PATCH 210/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 9 ++------- apis/dataset.py | 2 +- apis/study.py | 12 ++++-------- app.py | 2 +- model/study.py | 3 +-- model/study_metadata/identifiers.py | 18 ++++++++++++++---- 6 files changed, 23 insertions(+), 23 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index bb4a1b0f..e80a9ef9 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -2,13 +2,7 @@ from flask_restx import Namespace, Resource, fields from flask import request, g -from model import ( - Study, - db, - User, - StudyException, - StudyContributor -) +from model import Study, db, User, 
StudyException, StudyContributor from .authentication import is_granted api = Namespace("Contributor", description="Contributors", path="/") @@ -56,6 +50,7 @@ def post(self, study_id: int): db.session.commit() return contributor_.to_dict(), 201 + @api.route("/study//contributor/") class ContributorResource(Resource): @api.doc("contributor update") diff --git a/apis/dataset.py b/apis/dataset.py index 24c3ec56..8abebccb 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -92,7 +92,7 @@ def delete(self, study_id, dataset_id): db.session.delete(version) db.session.delete(data_obj) db.session.commit() - dataset_ =study.dataset + dataset_ = study.dataset return [d.to_dict() for d in dataset_], 201 # def delete(self, study_id, dataset_id, version_id): diff --git a/apis/study.py b/apis/study.py index b9cf72d9..926caf86 100644 --- a/apis/study.py +++ b/apis/study.py @@ -28,16 +28,12 @@ def get(self): # Study.study_contributors.any(User.id == g.user.id) # ).all() # studies = Study.query.filter(User.id == g.user.id).all() - study_contributors = ( - StudyContributor.query - .filter(StudyContributor.user_id == g.user.id) # Filter contributors where user_id matches the user's id - .all() - ) + study_contributors = StudyContributor.query.filter( + StudyContributor.user_id == g.user.id + ).all() # Filter contributors where user_id matches the user's id study_ids = [contributor.study_id for contributor in study_contributors] - studies = ( - Study.query.filter(Study.id.in_(study_ids)).all() - ) + studies = Study.query.filter(Study.id.in_(study_ids)).all() return [s.to_dict() for s in studies] @api.expect(study_model) diff --git a/app.py b/app.py index 669d400a..0ad93228 100644 --- a/app.py +++ b/app.py @@ -113,7 +113,7 @@ def on_after_request(resp): "/echo", "/swaggerui", "/swagger.json", - "/ favicon.ico" + "/ favicon.ico", ] for route in public_routes: if request.path.startswith(route): diff --git a/model/study.py b/model/study.py index 4ed5c1c7..f261ab70 100644 --- 
a/model/study.py +++ b/model/study.py @@ -163,10 +163,9 @@ def to_dict(self): if self.study_description else None, # "role": [i.to_dict()["role"] for i in self.study_contributors], - "owner_id": filtered_contributors + "owner_id": filtered_contributors, } - @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 7e019151..fa5d4cd9 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -13,10 +13,20 @@ def to_dict(self): key=lambda identifier: identifier.created_at, ) return { - "primary": - [identifier for identifier in sorted_study_identifications - if not identifier.secondary][0].to_dict() if len([identifier for identifier in - sorted_study_identifications if not identifier.secondary]) != 0 else [], + "primary": [ + identifier + for identifier in sorted_study_identifications + if not identifier.secondary + ][0].to_dict() + if len( + [ + identifier + for identifier in sorted_study_identifications + if not identifier.secondary + ] + ) + != 0 + else [], "secondary": [ identifier.to_dict() for identifier in sorted_study_identifications From 78aec10bb91caf55989ce18845919cd4dd87754f Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 4 Oct 2023 10:28:10 -0700 Subject: [PATCH 211/505] fix: contributors GET show all users --- apis/contributor.py | 23 ++++++++++++++++++----- app.py | 4 ++-- model/study.py | 7 ++++--- 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index bb4a1b0f..98d62cc1 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -7,7 +7,8 @@ db, User, StudyException, - StudyContributor + StudyContributor, + StudyInvitedContributor ) from .authentication import is_granted @@ -22,17 +23,28 @@ ) -@api.route("/study//contributors") +@api.route("/study//contributor") class AddContributor(Resource): @api.doc("contributor list") 
@api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def get(self, study_id: int): + # study_contributors = ( + # StudyContributor.query + # .filter(StudyContributor.user_id == g.user.id) # Filter contributors where user_id matches the user's id + # .all() + # ) contributors = StudyContributor.query.filter_by( - user_id=g.user.id, study_id=study_id + study_id=study_id ).all() - return [c.to_dict() for c in contributors] + invited_contributors = StudyInvitedContributor.query.filter_by( + study_id=study_id + ).all() + + contributors_list = [c.to_dict() for c in contributors] + [c.to_dict() for c in invited_contributors] + return contributors_list + @api.response(200, "Success") @api.response(400, "Validation Error") @@ -44,7 +56,7 @@ def post(self, study_id: int): data = request.json email_address = data["email_address"] user = User.query.filter_by(email_address=email_address).first() - permission = data["permission"] + permission = data["role"] contributor_ = None try: if user: @@ -56,6 +68,7 @@ def post(self, study_id: int): db.session.commit() return contributor_.to_dict(), 201 + @api.route("/study//contributor/") class ContributorResource(Resource): @api.doc("contributor update") diff --git a/app.py b/app.py index 669d400a..aaa09c95 100644 --- a/app.py +++ b/app.py @@ -118,8 +118,8 @@ def on_after_request(resp): for route in public_routes: if request.path.startswith(route): return resp - print("after request") - print(request.cookies.get("token")) + # print("after request") + # print(request.cookies.get("token")) if "token" not in request.cookies: return resp token = request.cookies.get("token") diff --git a/model/study.py b/model/study.py index 4ed5c1c7..a5b293f7 100644 --- a/model/study.py +++ b/model/study.py @@ -5,7 +5,7 @@ from .db import db import datetime from flask import g - +from sqlalchemy import and_ class StudyException(Exception): pass @@ -148,7 +148,7 @@ def __init__(self): def to_dict(self): 
filtered_contributors = [c.to_dict() for c in self.study_contributors] - # contributors = model.StudyContributor.query.get(self.study_contributors.id).first() + contributors = model.StudyContributor.query.filter_by().first() """Converts the study to a dictionary""" return { @@ -163,7 +163,8 @@ def to_dict(self): if self.study_description else None, # "role": [i.to_dict()["role"] for i in self.study_contributors], - "owner_id": filtered_contributors + "owner_id": contributors.to_dict()["user_id"], + "role": contributors.to_dict()["role"] } From bee7494a15665df75148347059ddd880bfee5ed8 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 4 Oct 2023 14:02:18 -0700 Subject: [PATCH 212/505] feat: add OWNER/user_id PUT endpoint --- apis/authentication.py | 1 + apis/contributor.py | 42 +++++++++++++++++++++++++++++++++++------- app.py | 1 + model/study.py | 3 +++ 4 files changed, 40 insertions(+), 7 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index ff74fb11..6bd93b8b 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -171,6 +171,7 @@ def is_granted(permission: str, study): "participant", "study_metadata", "dataset_metadata", + "make_owner" ], "editor": [ "editor", diff --git a/apis/contributor.py b/apis/contributor.py index 98d62cc1..06cfdbec 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -77,17 +77,15 @@ class ContributorResource(Resource): @api.expect(contributors_model) def put(self, study_id: int, user_id: int): """update contributor based on the assigned permissions""" - study = Study.query.get(study_id) if not is_granted("permission", study): return ( "Access denied, you are not authorized to change this permission", 403, ) - data = request.json user = User.query.get(user_id) - permission = data["permission"] + permission = data["role"] grantee = StudyContributor.query.filter( StudyContributor.user == user, StudyContributor.study == study ).first() @@ -100,7 +98,7 @@ def put(self, study_id: int, user_id: 
int): grants = OrderedDict() grants["viewer"] = [] grants["editor"] = ["viewer"] - grants["admin"] = ["viewer", "editor"] + grants["admin"] = ["viewer", "editor", "admin"] grants["owner"] = ["editor", "viewer", "admin"] can_grant = permission in grants[granter.permission] @@ -110,10 +108,10 @@ def put(self, study_id: int, user_id: int): # Granter can not downgrade anyone of equal or greater permissions other than themselves # TODO: Owners downgrading themselves if user != g.user: - grantee_level = list(grants.keys()).index(grantee.permission) # 2 - new_level = list(grants.keys()).index(permission) # 0 + grantee_level = list(grants.keys()).index(grantee.permission) # 1 + new_level = list(grants.keys()).index(permission) # 2 granter_level = list(grants.keys()).index(granter.permission) # 2 - if granter_level <= grantee_level and new_level < grantee_level: + if granter_level <= grantee_level and new_level <= grantee_level: return ( f"User cannot downgrade from {grantee.permission} to {permission}", 403, @@ -163,3 +161,33 @@ def delete(self, study_id: int, user_id: int): StudyContributor.study == study ).all() return [contributor.to_dict() for contributor in contributors], 200 + + +@api.route("/study//owner/") +class AssignOwner(Resource): + @api.doc("contributor update") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.expect(contributors_model) + def put(self, study_id: int, user_id: int): + data = request.json + """set owner based on the assigned permissions""" + study = Study.query.get(study_id) + if not is_granted("make_owner", study): + return "Access denied, you are not authorized to change this permission", 403 + if not data["role"] == "owner": + return "you can assign only owner", 403 + user = User.query.get(user_id) + existing_contributor = StudyContributor.query.filter( + StudyContributor.user == user, + StudyContributor.study == study, + + ).first() + existing_contributor.permission = "owner" + existing_owner = 
StudyContributor.query.filter( + StudyContributor.study == study, + StudyContributor.permission == "owner" + ).first() + existing_owner.permission = "admin" + db.session.commit() + return existing_contributor.to_dict(), 200 diff --git a/app.py b/app.py index aaa09c95..f12fafb7 100644 --- a/app.py +++ b/app.py @@ -86,6 +86,7 @@ def create_app(): # def create_schema(): # engine = model.db.session.get_bind() # metadata = MetaData() + # metadata = MetaData() # metadata.reflect(bind=engine) # table_names = [table.name for table in metadata.tables.values()] # print(table_names) diff --git a/model/study.py b/model/study.py index a5b293f7..7871f169 100644 --- a/model/study.py +++ b/model/study.py @@ -219,3 +219,6 @@ def invite_user_to_study(self, email_address, permission): ) db.session.add(contributor_add) return contributor_add + + def get_owner(self): + return self.study_contributors.query.filter_by(self.study_contributors.permission == "owner") From adf9b209ab5400497dd34d381f2b2521c44957fe Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 4 Oct 2023 23:57:04 +0000 Subject: [PATCH 213/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 +- apis/contributor.py | 22 +++++++++++----------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 6bd93b8b..2d5fb065 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -171,7 +171,7 @@ def is_granted(permission: str, study): "participant", "study_metadata", "dataset_metadata", - "make_owner" + "make_owner", ], "editor": [ "editor", diff --git a/apis/contributor.py b/apis/contributor.py index 039a07ac..798d03c0 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -8,7 +8,7 @@ User, StudyException, StudyContributor, - StudyInvitedContributor + StudyInvitedContributor, ) from 
.authentication import is_granted @@ -35,17 +35,16 @@ def get(self, study_id: int): # .filter(StudyContributor.user_id == g.user.id) # Filter contributors where user_id matches the user's id # .all() # ) - contributors = StudyContributor.query.filter_by( - study_id=study_id - ).all() + contributors = StudyContributor.query.filter_by(study_id=study_id).all() invited_contributors = StudyInvitedContributor.query.filter_by( - study_id=study_id + study_id=study_id ).all() - contributors_list = [c.to_dict() for c in contributors] + [c.to_dict() for c in invited_contributors] + contributors_list = [c.to_dict() for c in contributors] + [ + c.to_dict() for c in invited_contributors + ] return contributors_list - @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) @@ -174,19 +173,20 @@ def put(self, study_id: int, user_id: int): """set owner based on the assigned permissions""" study = Study.query.get(study_id) if not is_granted("make_owner", study): - return "Access denied, you are not authorized to change this permission", 403 + return ( + "Access denied, you are not authorized to change this permission", + 403, + ) if not data["role"] == "owner": return "you can assign only owner", 403 user = User.query.get(user_id) existing_contributor = StudyContributor.query.filter( StudyContributor.user == user, StudyContributor.study == study, - ).first() existing_contributor.permission = "owner" existing_owner = StudyContributor.query.filter( - StudyContributor.study == study, - StudyContributor.permission == "owner" + StudyContributor.study == study, StudyContributor.permission == "owner" ).first() existing_owner.permission = "admin" db.session.commit() From 956a67835bc4137c03dc4c805eb4ed684001581a Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 4 Oct 2023 17:12:48 -0700 Subject: [PATCH 214/505] feat: add OWNER/user_id PUT endpoint --- apis/authentication.py | 1 - apis/contributor.py | 3 +-- 2 files changed, 1 insertion(+), 
3 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 6bd93b8b..47b07b31 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -176,7 +176,6 @@ def is_granted(permission: str, study): "editor": [ "editor", "view", - "delete_contributor", "add_study", "update_study", "add_dataset", diff --git a/apis/contributor.py b/apis/contributor.py index 039a07ac..1f27180a 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -23,7 +23,7 @@ ) -@api.route("/study//contributors") +@api.route("/study//contributor") class AddContributor(Resource): @api.doc("contributor list") @api.response(200, "Success") @@ -124,7 +124,6 @@ def put(self, study_id: int, user_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: int): - data = request.json study = Study.query.get(study_id) if not is_granted("delete_contributors", study): return ( From 749622347ecb463141a7cbde65501b9f6d813f38 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 4 Oct 2023 22:33:18 -0700 Subject: [PATCH 215/505] feat: add permissions for study endpoint --- apis/authentication.py | 10 +++- apis/invited_contributor.py | 47 ------------------- apis/study_metadata/study_arm.py | 10 ++-- apis/study_metadata/study_available_ipd.py | 9 +++- apis/study_metadata/study_contact.py | 8 ++++ apis/study_metadata/study_description.py | 5 +- apis/study_metadata/study_design.py | 4 ++ apis/study_metadata/study_eligibility.py | 4 +- apis/study_metadata/study_identification.py | 7 ++- apis/study_metadata/study_intervention.py | 11 +++-- apis/study_metadata/study_ipdsharing.py | 17 ++----- apis/study_metadata/study_link.py | 15 +++--- apis/study_metadata/study_location.py | 11 +++-- apis/study_metadata/study_other.py | 27 +++++------ apis/study_metadata/study_overall_official.py | 17 ++++--- apis/study_metadata/study_reference.py | 11 +++-- .../study_sponsors_collaborators.py | 29 ++++-------- 
apis/study_metadata/study_status.py | 12 ++--- 18 files changed, 116 insertions(+), 138 deletions(-) delete mode 100644 apis/invited_contributor.py diff --git a/apis/authentication.py b/apis/authentication.py index f978d313..27f06fb8 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -3,7 +3,7 @@ from model import StudyContributor from datetime import timezone import datetime -from model import db, User, TokenBlacklist +from model import db, User, TokenBlacklist, Study import jwt import config import uuid @@ -131,7 +131,7 @@ def authorization(): raise UnauthenticatedException("Access denied", 403) -def is_granted(permission: str, study): +def is_granted(permission: str, study=None): """filters users and checks whether current permission equal to passed permission""" contributor = StudyContributor.query.filter( StudyContributor.user == g.user, StudyContributor.study == study @@ -191,6 +191,12 @@ def is_granted(permission: str, study): return permission in role[contributor.permission] +def is_study_metadata(study_id: int): + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + + @api.route("/auth/logout") class Logout(Resource): @api.response(200, "Success") diff --git a/apis/invited_contributor.py b/apis/invited_contributor.py deleted file mode 100644 index d420957c..00000000 --- a/apis/invited_contributor.py +++ /dev/null @@ -1,47 +0,0 @@ -from flask_restx import Namespace, Resource, fields -from model import ( - Study, - db, - User, - StudyException, -) -from flask import request -from .authentication import is_granted - -api = Namespace("Invited_contributors", description="Invited contributors", path="/") - - -contributors_model = api.model( - "InvitedContributor", - { - "permission": fields.String(required=True), - "email_address": fields.String(required=True), - }, -) - - -@api.route("/study//invited-contributor") -class AddInvitedContributor(Resource): 
- @api.doc("invited contributor") - @api.expect(contributors_model) - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(contributors_model) - def post(self, study_id: int): - study_obj = Study.query.get(study_id) - if not is_granted("invite_contributor", study_obj): - return "Access denied, you can not modify", 403 - data = request.json - email_address = data["email_address"] - user = User.query.filter_by(email_address=email_address).first() - permission = data["permission"] - contributor_ = None - try: - if user: - contributor_ = study_obj.add_user_to_study(user, permission) - else: - contributor_ = study_obj.invite_user_to_study(email_address, permission) - except StudyException as ex: - return ex.args[0], 409 - db.session.commit() - return contributor_.to_dict(), 201 diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 116beb86..4173214b 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db, StudyArm, Arm +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -43,10 +44,11 @@ def get(self, study_id): def post(self, study_id): """Create study arm metadata""" + study = Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 data = request.json - study_obj = Study.query.get(study_id) - for i in data: if "id" in i and i["id"]: study_arm_ = StudyArm.query.get(i["id"]) @@ -68,8 +70,10 @@ class StudyArmUpdate(Resource): def delete(self, study_id: int, arm_id: int): """Delete study arm metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study_arm_ = StudyArm.query.get(arm_id) - db.session.delete(study_arm_) db.session.commit() diff --git 
a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 07058264..c35f8273 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -3,6 +3,8 @@ from flask import request from model import Study, db, StudyAvailableIpd from apis.study_metadata_namespace import api +from ..authentication import is_granted + study_available = api.model( "StudyAvailable", @@ -42,6 +44,9 @@ def get(self, study_id: int): @api.marshal_with(study_available) def post(self, study_id: int): """Create study available metadata""" + study = Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 data = request.json study_obj = Study.query.get(study_id) @@ -69,7 +74,9 @@ class StudyLocationUpdate(Resource): def delete(self, study_id: int, available_ipd_id: int): """Delete study available metadata""" - + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study_available_ = StudyAvailableIpd.query.get(available_ipd_id) db.session.delete(study_available_) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 262f86cd..e7f6dcbd 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -3,6 +3,8 @@ from flask import request from model import Study, db, StudyContact from apis.study_metadata_namespace import api +from ..authentication import is_granted, is_study_metadata + study_contact = api.model( "StudyContact", @@ -39,6 +41,9 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study contact metadata""" + study = Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 data = request.json study_obj = Study.query.get(study_id) @@ -64,6 +69,9 @@ class StudyContactUpdate(Resource): 
"""Study Contact Metadata""" def delete(self, study_id: int, central_contact_id: int): + study = Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 """Delete study contact metadata""" study_contact_ = StudyContact.query.get(central_contact_id) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index bcfb0f6c..086e8620 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -2,7 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db - +from ..authentication import is_granted, is_study_metadata from apis.study_metadata_namespace import api @@ -35,6 +35,9 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study description metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study_ = Study.query.get(study_id) study_.study_description.update(request.json) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 7c2d21a7..0821708a 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -49,6 +50,9 @@ def get(self, study_id: int): return study_design_.to_dict() def put(self, study_id: int): + study = Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 """Update study design metadata""" study_ = Study.query.get(study_id) diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 1a340a5c..0a0d5955 100644 --- 
a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -46,7 +47,8 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study eligibility metadata""" study_ = Study.query.get(study_id) - + if not is_granted("study_metadata", study_): + return "Access denied, you can not delete study", 403 study_.study_eligibility.update(request.json) db.session.commit() diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index bfbc82a8..718564d0 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db, StudyIdentification, Identifiers +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -44,7 +45,8 @@ def post(self, study_id: int): data = request.json study_obj = Study.query.get(study_id) - + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 primary = data["primary"] primary["secondary"] = False @@ -81,6 +83,9 @@ class StudyIdentificationdUpdate(Resource): def delete(self, study_id: int, identification_id: int): """Delete study identification metadata""" + study = Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 study_identification_ = StudyIdentification.query.get(identification_id) if not study_identification_.secondary: diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 32d36cfb..4de6d19e 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -2,6 
+2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db, StudyIntervention +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -43,12 +44,11 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study intervention metadata""" - data = request.json - study_obj = Study.query.get(study_id) - + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 list_of_elements = [] - + data = request.json for i in data: if "id" in i and i["id"]: study_intervention_ = StudyIntervention.query.get(i["id"]) @@ -69,6 +69,9 @@ class StudyInterventionUpdate(Resource): def delete(self, study_id: int, intervention_id: int): """Delete study intervention metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study_intervention_ = StudyIntervention.query.get(intervention_id) db.session.delete(study_intervention_) diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index ec62e23a..c9c5bc8e 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -23,8 +24,7 @@ @api.route("/study//metadata/ipdsharing") class StudyIpdsharingResource(Resource): - """Study Ipdsharing Metadata""" - + """Study Ipd sharing Metadata""" @api.doc("ipdsharing") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -39,17 +39,8 @@ def get(self, study_id: int): def put(self, study_id: int): """Create study ipdsharing metadata""" study_ = Study.query.get(study_id) - + if not is_granted("study_metadata", study_): + return "Access denied, you can not delete study", 403 
study_.study_ipdsharing.update(request.json) - db.session.commit() - return study_.study_ipdsharing.to_dict() - - # def post(self, study_id: int): - # data = request.json - # study_ipdsharing_ = Study.query.get(study_id) - # study_ipdsharing_ = StudyIpdsharing.from_data(study_ipdsharing_, data) - # db.session.add(study_ipdsharing_) - # db.session.commit() - # return study_ipdsharing_.to_dict() diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index bdf679b7..ad4b9397 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -2,7 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db, StudyLink - +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -29,21 +29,17 @@ class StudyLinkResource(Resource): def get(self, study_id: int): """Get study link metadata""" study_ = Study.query.get(study_id) - study_link_ = study_.study_link - sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at) - return [s.to_dict() for s in sorted_study_link_] def post(self, study_id: int): """Create study link metadata""" - data = request.json - study_obj = Study.query.get(study_id) - + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + data = request.json list_of_elements = [] - for i in data: if "id" in i and i["id"]: study_link_ = StudyLink.query.get(i["id"]) @@ -68,6 +64,9 @@ class StudyLinkUpdate(Resource): def delete(self, study_id: int, link_id: int): """Delete study link metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study_link_ = StudyLink.query.get(link_id) db.session.delete(study_link_) diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 2f5def7a..d9977234 100644 --- a/apis/study_metadata/study_location.py +++ 
b/apis/study_metadata/study_location.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db, StudyLocation +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -42,12 +43,11 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study location metadata""" - data = request.json - study_obj = Study.query.get(study_id) - + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + data = request.json list_of_elements = [] - for i in data: if "id" in i and i["id"]: study_location_ = StudyLocation.query.get(i["id"]) @@ -69,6 +69,9 @@ class StudyLocationUpdate(Resource): def delete(self, study_id: int, location_id: int): """Delete study location metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study_location_ = StudyLocation.query.get(location_id) db.session.delete(study_location_) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index cab7ce3f..5c1b3011 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -64,16 +65,14 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study oversight metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 data = request.json - - study_ = Study.query.get(study_id) - - study_oversight = study_.study_other.oversight_has_dmc = data[ + study_oversight = study_obj.study_other.oversight_has_dmc = data[ "oversight_has_dmc" ] - - study_.touch() - + study_obj.touch() db.session.commit() 
return study_oversight @@ -99,13 +98,11 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study conditions metadata""" data = request.json - - study_ = Study.query.get(study_id) - - study_.study_other.conditions = data - - study_.touch() - + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + study_obj.study_other.conditions = data + study_obj.touch() db.session.commit() - return study_.study_other.conditions + return study_obj.study_other.conditions diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 3be69584..9c1c6a16 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -5,6 +5,7 @@ from apis.study_metadata_namespace import api +from ..authentication import is_granted study_overall_official = api.model( @@ -41,14 +42,15 @@ def get(self, study_id: int): return [i.to_dict() for i in sorted_study_overall] + @api.response(200, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int): """Create study overall official metadata""" data = request.json - study_obj = Study.query.get(study_id) - + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 list_of_elements = [] - for i in data: if "id" in i and i["id"]: study_overall_official_ = StudyOverallOfficial.query.get(i["id"]) @@ -65,16 +67,17 @@ def post(self, study_id: int): @api.route("/study//metadata/overall-official/") class StudyOverallOfficialUpdate(Resource): - """Study Overall Official Metadata""" - + @api.response(200, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, overall_official_id: int): """Delete study overall official metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete 
study", 403 study_overall_official_ = StudyOverallOfficial.query.get( overall_official_id ) - db.session.delete(study_overall_official_) - db.session.commit() return 204 diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index e70d7a40..93f27447 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db, StudyReference +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -43,12 +44,11 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study reference metadata""" - data = request.json - study_obj = Study.query.get(study_id) - + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + data = request.json list_of_elements = [] - for i in data: if "id" in i and i["id"]: study_reference_ = StudyReference.query.get(i["id"]) @@ -69,6 +69,9 @@ class StudyReferenceUpdate(Resource): def delete(self, study_id: int, reference_id: int): """Delete study reference metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study_reference_ = StudyReference.query.get(reference_id) db.session.delete(study_reference_) diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index f08f8629..89566ffe 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db +from ..authentication import is_granted from apis.study_metadata_namespace import api @@ -71,25 +72,15 @@ def get(self, study_id: int): return study_collaborators_ + @api.response(200, "Success") + 
@api.response(400, "Validation Error") def put(self, study_id: int): - """Update study collaborators metadata""" + """updating study collaborators""" data = request.json - - study_ = Study.query.get(study_id) - - study_.study_sponsors_collaborators.collaborator_name = data - - study_.touch() + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + study_obj.study_sponsors_collaborators.collaborator_name = data + study_obj.touch() db.session.commit() - - return study_.study_sponsors_collaborators.collaborator_name - - # @api.route("/study//metadata/collaborators/") - # class StudyCollaboratorsUpdate(Resource): - # def put(self, study_id: int, collaborators_id: int): - # study_sponsors_collaborators_ = StudySponsorsCollaborators.query.get( - # collaborators_id - # ) - # study_sponsors_collaborators_.update(request.json) - # db.session.commit() - # return study_sponsors_collaborators_.to_dict() + return study_obj.study_sponsors_collaborators.collaborator_name diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 37aa6de8..3fe2b9e5 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields from flask import request from model import Study, db +from ..authentication import is_granted, is_study_metadata from apis.study_metadata_namespace import api @@ -40,6 +41,9 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study status metadata""" + study_obj = Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 study = Study.query.get(study_id) study.study_status.update(request.json) @@ -47,11 +51,3 @@ def put(self, study_id: int): db.session.commit() return study.study_status.to_dict() - - # @api.route("/study//metadata/status/") - # class 
StudyStatusUpdate(Resource): - # def put(self, study_id: int, status_id: int): - # study_status_ = StudyStatus.query.get(status_id) - # study_status_.update(request.json) - # db.session.commit() - # return study_status_.to_dict() From 2eee65a2f2973ddc39417a7e5a1a589c23157a11 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 4 Oct 2023 23:04:31 -0700 Subject: [PATCH 216/505] fix: study get endpoint added owner id and role --- apis/__init__.py | 2 -- model/study.py | 11 +++++------ 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 282a256e..749db876 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -9,7 +9,6 @@ from .dataset import api as dataset_api from .participant import api as participants_api from .study import api as study_api -from .invited_contributor import api as invited_contributors from .authentication import api as authentication @@ -82,4 +81,3 @@ def get(self): api.add_namespace(dataset_api) api.add_namespace(participants_api) api.add_namespace(contributors_api) -api.add_namespace(invited_contributors) diff --git a/model/study.py b/model/study.py index f261ab70..8f8d289e 100644 --- a/model/study.py +++ b/model/study.py @@ -147,9 +147,9 @@ def __init__(self): ) def to_dict(self): - filtered_contributors = [c.to_dict() for c in self.study_contributors] - # contributors = model.StudyContributor.query.get(self.study_contributors.id).first() - + contributors = model.StudyContributor.query.filter( + model.StudyContributor.permission == "owner" + ).first() """Converts the study to a dictionary""" return { "id": self.id, @@ -157,13 +157,12 @@ def to_dict(self): "image": self.image, "created_at": self.created_at, "updated_on": self.updated_on, - # "study_contributors": self.study_contributors.to_dict(), "size": self.study_other.size if self.study_other else None, "description": self.study_description.brief_summary if self.study_description else None, - # "role": [i.to_dict()["role"] for i in 
self.study_contributors], - "owner_id": filtered_contributors, + "owner_id": contributors.to_dict()["user_id"], + "role": contributors.to_dict()["role"] } @staticmethod From 3cc3261f9674d7a7a58175f5e19c59e4e0dad1fe Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 5 Oct 2023 06:05:09 +0000 Subject: [PATCH 217/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_ipdsharing.py | 1 + model/study.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index c9c5bc8e..d4cc2701 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -25,6 +25,7 @@ @api.route("/study//metadata/ipdsharing") class StudyIpdsharingResource(Resource): """Study Ipd sharing Metadata""" + @api.doc("ipdsharing") @api.response(200, "Success") @api.response(400, "Validation Error") diff --git a/model/study.py b/model/study.py index 8f8d289e..1d7e2c0a 100644 --- a/model/study.py +++ b/model/study.py @@ -162,7 +162,7 @@ def to_dict(self): if self.study_description else None, "owner_id": contributors.to_dict()["user_id"], - "role": contributors.to_dict()["role"] + "role": contributors.to_dict()["role"], } @staticmethod From fa534a5b550615158a83cdedab8b9fb3e5c0961a Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 09:14:53 -0700 Subject: [PATCH 218/505] fix: contributors get return --- model/invited_study_contributor.py | 7 +++---- model/study_contributor.py | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index f1f6073b..9f6609bb 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -25,8 +25,7 @@ def __init__(self, study, email_address, permission): def 
to_dict(self): return { - "study_id": self.study.id, - "email_address": self.email_address, - "permission": self.permission, - "invited_on": self.invited_on, + "id": self.email_address, + "status": "invited", + "role": self.permission, } diff --git a/model/study_contributor.py b/model/study_contributor.py index 3bbd866f..cf368ed9 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -29,7 +29,7 @@ def to_dict(self): "email_address": self.user.email_address, "orcid": self.user.user_details.orcid if self.user.user_details else None, "role": self.permission, - "status": None, + "status": "active", } @staticmethod From fee01d888d94132fe10e27532f887c2d0355dff8 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 10:03:51 -0700 Subject: [PATCH 219/505] fix: contributors GET return --- model/invited_study_contributor.py | 1 + model/study.py | 2 +- model/study_contributor.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 9f6609bb..5941c574 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -28,4 +28,5 @@ def to_dict(self): "id": self.email_address, "status": "invited", "role": self.permission, + "email_address": self.email_address, } diff --git a/model/study.py b/model/study.py index 1d7e2c0a..2c96f17c 100644 --- a/model/study.py +++ b/model/study.py @@ -161,7 +161,7 @@ def to_dict(self): "description": self.study_description.brief_summary if self.study_description else None, - "owner_id": contributors.to_dict()["user_id"], + "owner": contributors.to_dict()["id"], "role": contributors.to_dict()["role"], } diff --git a/model/study_contributor.py b/model/study_contributor.py index cf368ed9..6ba9155b 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -22,7 +22,7 @@ def __init__(self, study, user, permission): def to_dict(self): return { - "user_id": self.user_id, + "id": 
self.user_id, "name": self.user.user_details.first_name if self.user.user_details else None, From 535807ea800753a2f70ca521536aab960d3e7c06 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 11:03:42 -0700 Subject: [PATCH 220/505] fix: status set active --- model/study_contributor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/study_contributor.py b/model/study_contributor.py index 6ba9155b..2e9f18c6 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -29,7 +29,7 @@ def to_dict(self): "email_address": self.user.email_address, "orcid": self.user.user_details.orcid if self.user.user_details else None, "role": self.permission, - "status": "active", + "status": "accepted", } @staticmethod From 00263703fbdaa2a7da6fab10c3ca44783ae7233a Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 15:35:58 -0700 Subject: [PATCH 221/505] fix: invite contributors --- apis/authentication.py | 2 +- apis/contributor.py | 69 +++++++++++++++++------------- model/invited_study_contributor.py | 2 +- model/study.py | 7 +-- model/study_contributor.py | 2 +- 5 files changed, 46 insertions(+), 36 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 27f06fb8..76ebcb38 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -80,7 +80,7 @@ def post(self): { "user": user.id, "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=20), + + datetime.timedelta(minutes=200), "jti": str(uuid.uuid4()), }, config.secret, diff --git a/apis/contributor.py b/apis/contributor.py index a2aab0e1..e7b72bee 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -122,43 +122,52 @@ def put(self, study_id: int, user_id: int): @api.doc("contributor delete") @api.response(200, "Success") @api.response(400, "Validation Error") - def delete(self, study_id: int, user_id: int): + def delete(self, study_id: int, user_id: str): study = Study.query.get(study_id) - if not 
is_granted("delete_contributors", study): + if not is_granted("delete_contributor", study): return ( "Access denied, you are not authorized to change this permission", 403, ) - user = User.query.get(user_id) - grantee = StudyContributor.query.filter( - StudyContributor.user == user, StudyContributor.study == study - ).first() - granter = StudyContributor.query.filter( - StudyContributor.user == g.user, StudyContributor.study == study - ).first() - # Order should go from the least privileged to the most privileged - grants = OrderedDict() - grants["viewer"] = [] - grants["editor"] = ["viewer"] - grants["admin"] = ["viewer", "editor"] - grants["owner"] = ["editor", "viewer", "admin"] - - # Granter can not downgrade anyone of equal or greater permissions other than themselves - # TODO: Owners downgrading themselves - if user != g.user: - grantee_level = list(grants.keys()).index(grantee.permission) # 2 - granter_level = list(grants.keys()).index(granter.permission) # 2 - if granter_level <= grantee_level: - return ( - f"You are not authorized to delete {grantee.permission}s from study", - 403, - ) - db.session.delete(grantee) + contributors = [] + if "@" in user_id: + invited_contributors = StudyInvitedContributor.query.filter_by(study_id=study_id, + email_address=user_id).first() + db.session.delete(invited_contributors) + else: + user = User.query.get(user_id) + grantee = StudyContributor.query.filter( + StudyContributor.user == user, StudyContributor.study == study + ).first() + contributors.append(grantee) + # invited_contributors = StudyInvitedContributor.query.filter_by(study_id=study_id, + # email_address=user_id).first() + # granter = StudyContributor.query.filter( + # StudyContributor.user == g.user, StudyContributor.study == study + # ).first() + # # Order should go from the least privileged to the most privileged + # grants = OrderedDict() + # grants["viewer"] = [] + # grants["editor"] = ["viewer"] + # grants["admin"] = ["viewer", "editor"] + # 
grants["owner"] = ["editor", "viewer", "admin"] + # # Granter can not downgrade anyone of equal or greater permissions other than themselves + # if user != g.user: + # grantee_level = list(grants.keys()).index(grantee.permission) # 2 + # granter_level = list(grants.keys()).index(granter.permission) # 2 + # if granter_level <= grantee_level: + # return ( + # f"You are not authorized to delete {grantee.permission}s from study", + # 403, + # ) + db.session.delete(grantee) db.session.commit() - contributors = StudyContributor.query.filter( - StudyContributor.study == study + contributors = StudyContributor.query.filter_by(study_id=study_id).all() + invited_contributors = StudyInvitedContributor.query.filter_by( + study_id=study_id ).all() - return [contributor.to_dict() for contributor in contributors], 200 + return invited_contributors if "@" in user_id else contributors + @api.route("/study//owner/") diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 5941c574..b397fada 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -25,7 +25,7 @@ def __init__(self, study, email_address, permission): def to_dict(self): return { - "id": self.email_address, + "user_id": self.email_address, "status": "invited", "role": self.permission, "email_address": self.email_address, diff --git a/model/study.py b/model/study.py index 2c96f17c..a352866b 100644 --- a/model/study.py +++ b/model/study.py @@ -161,7 +161,7 @@ def to_dict(self): "description": self.study_description.brief_summary if self.study_description else None, - "owner": contributors.to_dict()["id"], + "owner_id": contributors.to_dict()["user_id"], "role": contributors.to_dict()["role"], } @@ -194,13 +194,14 @@ def touch(self): def add_user_to_study(self, user, permission): contributor = self.study_contributors.filter( model.StudyContributor.user_id == user.id - ) + ).all() + print("start", contributor, "dgfdrggdgdrfgbd") if contributor: raise 
StudyException("User is already exists in study") else: contributor = model.StudyContributor(self, user, permission) db.session.add(contributor) - return contributor + return contributor def invite_user_to_study(self, email_address, permission): invited_contributor = self.invited_contributors.filter( diff --git a/model/study_contributor.py b/model/study_contributor.py index 2e9f18c6..4ff0ec2a 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -22,7 +22,7 @@ def __init__(self, study, user, permission): def to_dict(self): return { - "id": self.user_id, + "user_id": self.user_id, "name": self.user.user_details.first_name if self.user.user_details else None, From b1af8ac284e72e785ff8d002760be7ddb9c74863 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 5 Oct 2023 22:37:03 +0000 Subject: [PATCH 222/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index e7b72bee..852d17e4 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -131,8 +131,9 @@ def delete(self, study_id: int, user_id: str): ) contributors = [] if "@" in user_id: - invited_contributors = StudyInvitedContributor.query.filter_by(study_id=study_id, - email_address=user_id).first() + invited_contributors = StudyInvitedContributor.query.filter_by( + study_id=study_id, email_address=user_id + ).first() db.session.delete(invited_contributors) else: user = User.query.get(user_id) @@ -169,7 +170,6 @@ def delete(self, study_id: int, user_id: str): return invited_contributors if "@" in user_id else contributors - @api.route("/study//owner/") class AssignOwner(Resource): @api.doc("contributor update") From 87c2f620e6d3699a9513027eb9a83a1524340d1e Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 
15:44:28 -0700 Subject: [PATCH 223/505] fix: delete contributors endpoint --- apis/contributor.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index e7b72bee..26146b0c 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -134,6 +134,7 @@ def delete(self, study_id: int, user_id: str): invited_contributors = StudyInvitedContributor.query.filter_by(study_id=study_id, email_address=user_id).first() db.session.delete(invited_contributors) + contributors.append(invited_contributors) else: user = User.query.get(user_id) grantee = StudyContributor.query.filter( @@ -162,11 +163,7 @@ def delete(self, study_id: int, user_id: str): # ) db.session.delete(grantee) db.session.commit() - contributors = StudyContributor.query.filter_by(study_id=study_id).all() - invited_contributors = StudyInvitedContributor.query.filter_by( - study_id=study_id - ).all() - return invited_contributors if "@" in user_id else contributors + return [c.to_dict() for c in contributors] From 2437f286138a8c5ee2610a9ea8a101610aaaf92b Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 16:23:50 -0700 Subject: [PATCH 224/505] fix: id for contributor fixed --- apis/contributor.py | 9 +++------ model/invited_study_contributor.py | 2 +- model/study.py | 2 +- model/study_contributor.py | 2 +- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 7b3cb859..9d6ab630 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -164,17 +164,16 @@ def delete(self, study_id: int, user_id: str): # ) db.session.delete(grantee) db.session.commit() - return [c.to_dict() for c in contributors] + return 204 -@api.route("/study//owner/") +@api.route("/study//contributor/owner/") class AssignOwner(Resource): @api.doc("contributor update") @api.response(200, "Success") @api.response(400, "Validation Error") @api.expect(contributors_model) def put(self, study_id: int, user_id: int): - 
data = request.json """set owner based on the assigned permissions""" study = Study.query.get(study_id) if not is_granted("make_owner", study): @@ -182,8 +181,6 @@ def put(self, study_id: int, user_id: int): "Access denied, you are not authorized to change this permission", 403, ) - if not data["role"] == "owner": - return "you can assign only owner", 403 user = User.query.get(user_id) existing_contributor = StudyContributor.query.filter( StudyContributor.user == user, @@ -195,4 +192,4 @@ def put(self, study_id: int, user_id: int): ).first() existing_owner.permission = "admin" db.session.commit() - return existing_contributor.to_dict(), 200 + return 204 diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index b397fada..5941c574 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -25,7 +25,7 @@ def __init__(self, study, email_address, permission): def to_dict(self): return { - "user_id": self.email_address, + "id": self.email_address, "status": "invited", "role": self.permission, "email_address": self.email_address, diff --git a/model/study.py b/model/study.py index a352866b..ce7f85d6 100644 --- a/model/study.py +++ b/model/study.py @@ -161,7 +161,7 @@ def to_dict(self): "description": self.study_description.brief_summary if self.study_description else None, - "owner_id": contributors.to_dict()["user_id"], + "owner_id": contributors.to_dict()["id"], "role": contributors.to_dict()["role"], } diff --git a/model/study_contributor.py b/model/study_contributor.py index 4ff0ec2a..2e9f18c6 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -22,7 +22,7 @@ def __init__(self, study, user, permission): def to_dict(self): return { - "user_id": self.user_id, + "id": self.user_id, "name": self.user.user_details.first_name if self.user.user_details else None, From bbef3d16130e81feeb4d59e9e9a32f9f917bcd7f Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 16:33:36 -0700 
Subject: [PATCH 225/505] fix: contributor delete permissions --- apis/contributor.py | 43 ++++++++++++++++++++++--------------------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 9d6ab630..70425c9d 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -124,7 +124,8 @@ def put(self, study_id: int, user_id: int): @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: str): study = Study.query.get(study_id) - if not is_granted("delete_contributor", study): + user = User.query.get(user_id) + if not is_granted("delete_contributor", study) and user != g.user: return ( "Access denied, you are not authorized to change this permission", 403, @@ -142,26 +143,26 @@ def delete(self, study_id: int, user_id: str): StudyContributor.user == user, StudyContributor.study == study ).first() contributors.append(grantee) - # invited_contributors = StudyInvitedContributor.query.filter_by(study_id=study_id, - # email_address=user_id).first() - # granter = StudyContributor.query.filter( - # StudyContributor.user == g.user, StudyContributor.study == study - # ).first() - # # Order should go from the least privileged to the most privileged - # grants = OrderedDict() - # grants["viewer"] = [] - # grants["editor"] = ["viewer"] - # grants["admin"] = ["viewer", "editor"] - # grants["owner"] = ["editor", "viewer", "admin"] - # # Granter can not downgrade anyone of equal or greater permissions other than themselves - # if user != g.user: - # grantee_level = list(grants.keys()).index(grantee.permission) # 2 - # granter_level = list(grants.keys()).index(granter.permission) # 2 - # if granter_level <= grantee_level: - # return ( - # f"You are not authorized to delete {grantee.permission}s from study", - # 403, - # ) + invited_contributors = StudyInvitedContributor.query.filter_by(study_id=study_id, + email_address=user_id).first() + granter = StudyContributor.query.filter( + StudyContributor.user == 
g.user, StudyContributor.study == study + ).first() + # Order should go from the least privileged to the most privileged + grants = OrderedDict() + grants["viewer"] = [] + grants["editor"] = ["viewer"] + grants["admin"] = ["viewer", "editor"] + grants["owner"] = ["editor", "viewer", "admin"] + # Granter can not downgrade anyone of equal or greater permissions other than themselves + if user != g.user: + grantee_level = list(grants.keys()).index(grantee.permission) # 2 + granter_level = list(grants.keys()).index(granter.permission) # 2 + if granter_level <= grantee_level: + return ( + f"You are not authorized to delete {grantee.permission}s from study", + 403, + ) db.session.delete(grantee) db.session.commit() return 204 From 658cdd2d355c92a1fcd75dcf772b3f53b42a22bb Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 5 Oct 2023 23:34:14 +0000 Subject: [PATCH 226/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 70425c9d..f0ffc93b 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -143,8 +143,9 @@ def delete(self, study_id: int, user_id: str): StudyContributor.user == user, StudyContributor.study == study ).first() contributors.append(grantee) - invited_contributors = StudyInvitedContributor.query.filter_by(study_id=study_id, - email_address=user_id).first() + invited_contributors = StudyInvitedContributor.query.filter_by( + study_id=study_id, email_address=user_id + ).first() granter = StudyContributor.query.filter( StudyContributor.user == g.user, StudyContributor.study == study ).first() From a2a401782428af12105f2bf2873006eecbc35f79 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 5 Oct 2023 16:43:27 -0700 Subject: [PATCH 227/505] fix: permission PUT update --- 
apis/contributor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/contributor.py b/apis/contributor.py index 70425c9d..8deb9852 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -97,7 +97,7 @@ def put(self, study_id: int, user_id: int): grants = OrderedDict() grants["viewer"] = [] grants["editor"] = ["viewer"] - grants["admin"] = ["viewer", "editor"] + grants["admin"] = ["viewer", "editor", "admin"] grants["owner"] = ["editor", "viewer", "admin"] can_grant = permission in grants[granter.permission] From 2a52ac1e934a0edc7deb7c0debaf5e6de5b569aa Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 6 Oct 2023 00:18:20 -0700 Subject: [PATCH 228/505] feat: visibility of study when invited users signup --- apis/authentication.py | 15 +++++++++------ apis/contributor.py | 2 ++ model/study.py | 8 +++++++- model/user.py | 3 +++ 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 76ebcb38..fc5f6774 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -3,7 +3,7 @@ from model import StudyContributor from datetime import timezone import datetime -from model import db, User, TokenBlacklist, Study +from model import db, User, TokenBlacklist, Study, StudyInvitedContributor import jwt import config import uuid @@ -44,13 +44,16 @@ def post(self): if not data["email_address"]: raise "Email is not found" user = User.query.filter_by(email_address=data["email_address"]).one_or_none() + print(user, "uudzusd") if user: return "This email address is already in use", 409 - # user = User.query.filter_by(username=data["username"]).one_or_none() - # if user: - # return "This username is already in use", 409 + is_invited = StudyInvitedContributor.query.filter_by(email_address=data["email_address"]).one_or_none() user_add = User.from_data(data) - # user.user_details.update(data) + if is_invited: + study = Study.query.filter_by(id=is_invited.study_id).first() + contributor_add = 
study.add_invited_to_contributor(user_add, is_invited.permission) + db.session.add(contributor_add) + db.session.delete(is_invited) db.session.add(user_add) db.session.commit() return f"Hi, {user_add.email_address}, you have successfully signed up", 201 @@ -155,6 +158,7 @@ def is_granted(permission: str, study=None): "participant", "study_metadata", "dataset_metadata", + "make_owner" ], "admin": [ "admin", @@ -171,7 +175,6 @@ def is_granted(permission: str, study=None): "participant", "study_metadata", "dataset_metadata", - "make_owner", ], "editor": [ "editor", diff --git a/apis/contributor.py b/apis/contributor.py index 0d0da4b8..e069eddd 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -188,10 +188,12 @@ def put(self, study_id: int, user_id: int): StudyContributor.user == user, StudyContributor.study == study, ).first() + existing_contributor.permission = "owner" existing_owner = StudyContributor.query.filter( StudyContributor.study == study, StudyContributor.permission == "owner" ).first() + existing_owner.permission = "admin" db.session.commit() return 204 diff --git a/model/study.py b/model/study.py index ce7f85d6..55d69055 100644 --- a/model/study.py +++ b/model/study.py @@ -192,10 +192,10 @@ def touch(self): self.updated_on = datetime.datetime.now(timezone.utc).timestamp() def add_user_to_study(self, user, permission): + """add user to study """ contributor = self.study_contributors.filter( model.StudyContributor.user_id == user.id ).all() - print("start", contributor, "dgfdrggdgdrfgbd") if contributor: raise StudyException("User is already exists in study") else: @@ -217,3 +217,9 @@ def invite_user_to_study(self, email_address, permission): ) db.session.add(contributor_add) return contributor_add + + def add_invited_to_contributor(self, user, permission): + """add invited users to contributor""" + contributor = model.StudyContributor(self, user, permission) + db.session.add(contributor) + return contributor diff --git a/model/user.py 
b/model/user.py index ca793497..7c4019ef 100644 --- a/model/user.py +++ b/model/user.py @@ -60,3 +60,6 @@ def check_password(self, password): hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") is_valid = app.bcrypt.check_password_hash(hashed_password, password) return is_valid + + def add_study(self, user, permission): + contributor = model.StudyContributor(self, permission) From 5a3a72cee7de5d79489ae1c97ab9d9514bd5b586 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 6 Oct 2023 12:20:20 -0700 Subject: [PATCH 229/505] feat: fix study get correct owner --- apis/contributor.py | 2 ++ apis/study.py | 2 ++ model/invited_study_contributor.py | 5 ++++- model/study.py | 3 +-- model/study_contributor.py | 9 ++++++--- 5 files changed, 15 insertions(+), 6 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index e069eddd..fdd91f8e 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -43,6 +43,8 @@ def get(self, study_id: int): contributors_list = [c.to_dict() for c in contributors] + [ c.to_dict() for c in invited_contributors ] + c = [c.to_dict() for c in contributors] + print(c, "contributor get") return contributors_list @api.response(200, "Success") diff --git a/apis/study.py b/apis/study.py index 926caf86..b2070030 100644 --- a/apis/study.py +++ b/apis/study.py @@ -34,6 +34,8 @@ def get(self): study_ids = [contributor.study_id for contributor in study_contributors] studies = Study.query.filter(Study.id.in_(study_ids)).all() + s = [s.to_dict()["owner_id"] for s in studies] + print(s, "owwwwneeeeeeeeerrrrr") return [s.to_dict() for s in studies] @api.expect(study_model) diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 5941c574..03b306fc 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -12,11 +12,14 @@ def __init__(self, study, email_address, permission): self.permission = permission self.invited_on = 
datetime.datetime.now(timezone.utc).timestamp() self.email_address = email_address - + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.token = "" __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) invited_on = db.Column(db.BigInteger, nullable=False) + token = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column( db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), primary_key=True diff --git a/model/study.py b/model/study.py index 55d69055..6d650fff 100644 --- a/model/study.py +++ b/model/study.py @@ -4,7 +4,6 @@ import model from .db import db import datetime -from flask import g class StudyException(Exception): @@ -147,7 +146,7 @@ def __init__(self): ) def to_dict(self): - contributors = model.StudyContributor.query.filter( + contributors = self.study_contributors.filter( model.StudyContributor.permission == "owner" ).first() """Converts the study to a dictionary""" diff --git a/model/study_contributor.py b/model/study_contributor.py index 2e9f18c6..a84c5650 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,5 +1,6 @@ -import uuid - +from datetime import datetime +from datetime import timezone +import datetime from .db import db @@ -8,10 +9,12 @@ def __init__(self, study, user, permission): self.study = study self.user = user self.permission = permission - + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), primary_key=True) + created_at = db.Column(db.CHAR(36), nullable=False) + user = db.relationship( "User", back_populates="study_contributors", From 5f175939d5bafb2efca540dfba55ab7438fb0985 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 6 Oct 2023 19:21:26 +0000 
Subject: [PATCH 230/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 10 +++++++--- model/invited_study_contributor.py | 1 + model/study.py | 2 +- model/study_contributor.py | 1 + 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index fc5f6774..0e0c19d9 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -47,11 +47,15 @@ def post(self): print(user, "uudzusd") if user: return "This email address is already in use", 409 - is_invited = StudyInvitedContributor.query.filter_by(email_address=data["email_address"]).one_or_none() + is_invited = StudyInvitedContributor.query.filter_by( + email_address=data["email_address"] + ).one_or_none() user_add = User.from_data(data) if is_invited: study = Study.query.filter_by(id=is_invited.study_id).first() - contributor_add = study.add_invited_to_contributor(user_add, is_invited.permission) + contributor_add = study.add_invited_to_contributor( + user_add, is_invited.permission + ) db.session.add(contributor_add) db.session.delete(is_invited) db.session.add(user_add) @@ -158,7 +162,7 @@ def is_granted(permission: str, study=None): "participant", "study_metadata", "dataset_metadata", - "make_owner" + "make_owner", ], "admin": [ "admin", diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 03b306fc..aa727f1d 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -14,6 +14,7 @@ def __init__(self, study, email_address, permission): self.email_address = email_address self.created_at = datetime.datetime.now(timezone.utc).timestamp() self.token = "" + __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) diff --git 
a/model/study.py b/model/study.py index 6d650fff..6b290924 100644 --- a/model/study.py +++ b/model/study.py @@ -191,7 +191,7 @@ def touch(self): self.updated_on = datetime.datetime.now(timezone.utc).timestamp() def add_user_to_study(self, user, permission): - """add user to study """ + """add user to study""" contributor = self.study_contributors.filter( model.StudyContributor.user_id == user.id ).all() diff --git a/model/study_contributor.py b/model/study_contributor.py index a84c5650..23ff3e91 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -10,6 +10,7 @@ def __init__(self, study, user, permission): self.user = user self.permission = permission self.created_at = datetime.datetime.now(timezone.utc).timestamp() + __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), primary_key=True) From 3d962e691ef9ab317722df93a15f4dd4501f3b1f Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 6 Oct 2023 12:36:12 -0700 Subject: [PATCH 231/505] feat: study get ownerkey --- apis/study.py | 14 -------------- model/study.py | 2 +- 2 files changed, 1 insertion(+), 15 deletions(-) diff --git a/apis/study.py b/apis/study.py index b2070030..867eb490 100644 --- a/apis/study.py +++ b/apis/study.py @@ -95,17 +95,3 @@ def delete(self, study_id: int): Study.study_contributors.any(User.id == g.user.id) ).all() return [s.to_dict() for s in studies], 201 - - -# @api.route("/view-profile", methods=["GET"]) -# def viewProfile(): -# return jsonify(dic) -# - - -# @study.route("/view-profile", methods=["POST"]) -# def update_user_profile(): -# data = request.json -# if data is not None: -# return jsonify(data), 201 -# diff --git a/model/study.py b/model/study.py index 6d650fff..fe90f171 100644 --- a/model/study.py +++ b/model/study.py @@ -160,7 +160,7 @@ def to_dict(self): "description": self.study_description.brief_summary if self.study_description else None, - "owner_id": 
contributors.to_dict()["id"], + "owner": contributors.to_dict()["id"], "role": contributors.to_dict()["role"], } From 61d165b71fed7e3cebd9938797853e91db894ea3 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 6 Oct 2023 14:00:42 -0700 Subject: [PATCH 232/505] fix: password authentication --- apis/__init__.py | 1 + apis/authentication.py | 1 - apis/contributor.py | 2 -- apis/study.py | 2 -- model/user.py | 3 ++- 5 files changed, 3 insertions(+), 6 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 749db876..b7515847 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -9,6 +9,7 @@ from .dataset import api as dataset_api from .participant import api as participants_api from .study import api as study_api +from .user import api as user from .authentication import api as authentication diff --git a/apis/authentication.py b/apis/authentication.py index 0e0c19d9..f40483f6 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -44,7 +44,6 @@ def post(self): if not data["email_address"]: raise "Email is not found" user = User.query.filter_by(email_address=data["email_address"]).one_or_none() - print(user, "uudzusd") if user: return "This email address is already in use", 409 is_invited = StudyInvitedContributor.query.filter_by( diff --git a/apis/contributor.py b/apis/contributor.py index fdd91f8e..e069eddd 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -43,8 +43,6 @@ def get(self, study_id: int): contributors_list = [c.to_dict() for c in contributors] + [ c.to_dict() for c in invited_contributors ] - c = [c.to_dict() for c in contributors] - print(c, "contributor get") return contributors_list @api.response(200, "Success") diff --git a/apis/study.py b/apis/study.py index 867eb490..932cb786 100644 --- a/apis/study.py +++ b/apis/study.py @@ -34,8 +34,6 @@ def get(self): study_ids = [contributor.study_id for contributor in study_contributors] studies = Study.query.filter(Study.id.in_(study_ids)).all() - s = 
[s.to_dict()["owner_id"] for s in studies] - print(s, "owwwwneeeeeeeeerrrrr") return [s.to_dict() for s in studies] @api.expect(study_model) diff --git a/model/user.py b/model/user.py index 7c4019ef..31bafed2 100644 --- a/model/user.py +++ b/model/user.py @@ -58,7 +58,8 @@ def check_password(self, password): """validates password and bcrypt hashed password""" # TODO check password length and make uppercase letter hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") - is_valid = app.bcrypt.check_password_hash(hashed_password, password) + is_valid = app.bcrypt.check_password_hash(hashed_password, self.hash) + print(is_valid) return is_valid def add_study(self, user, permission): From 13d49dff6aed82be29747793cdf4c52a29325fdf Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 6 Oct 2023 15:47:58 -0700 Subject: [PATCH 233/505] fix: login password check --- model/user.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/model/user.py b/model/user.py index 31bafed2..e98b69ab 100644 --- a/model/user.py +++ b/model/user.py @@ -57,10 +57,6 @@ def set_password(self, password, data): def check_password(self, password): """validates password and bcrypt hashed password""" # TODO check password length and make uppercase letter - hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") - is_valid = app.bcrypt.check_password_hash(hashed_password, self.hash) - print(is_valid) + app.bcrypt.generate_password_hash(password).decode("utf-8") + is_valid = app.bcrypt.check_password_hash(self.hash, password) return is_valid - - def add_study(self, user, permission): - contributor = model.StudyContributor(self, permission) From 48783b7b1f46866612cffc5cd67473d6c34065de Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 6 Oct 2023 16:31:35 -0700 Subject: [PATCH 234/505] feat: fix study get correct owner --- model/study.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/model/study.py 
b/model/study.py index cbd9243c..543f8ba0 100644 --- a/model/study.py +++ b/model/study.py @@ -4,7 +4,7 @@ import model from .db import db import datetime - +from flask import g class StudyException(Exception): pass @@ -146,9 +146,13 @@ def __init__(self): ) def to_dict(self): - contributors = self.study_contributors.filter( + owner_contributors = self.study_contributors.filter( model.StudyContributor.permission == "owner" ).first() + user = model.User.query.get(g.user.id) + contributor_permission = self.study_contributors.filter( + model.StudyContributor.user_id == user.id + ).first() """Converts the study to a dictionary""" return { "id": self.id, @@ -158,10 +162,9 @@ def to_dict(self): "updated_on": self.updated_on, "size": self.study_other.size if self.study_other else None, "description": self.study_description.brief_summary - if self.study_description - else None, - "owner": contributors.to_dict()["id"], - "role": contributors.to_dict()["role"], + if self.study_description else None, + "owner": owner_contributors.to_dict()["id"] if owner_contributors else None, + "role": contributor_permission.to_dict()["role"] } @staticmethod From dc654be7cb53d3ba7456e83033ff276b531bd02f Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 6 Oct 2023 23:32:12 +0000 Subject: [PATCH 235/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/study.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/model/study.py b/model/study.py index 543f8ba0..c61f7118 100644 --- a/model/study.py +++ b/model/study.py @@ -6,6 +6,7 @@ import datetime from flask import g + class StudyException(Exception): pass @@ -151,7 +152,7 @@ def to_dict(self): ).first() user = model.User.query.get(g.user.id) contributor_permission = self.study_contributors.filter( - model.StudyContributor.user_id == user.id + model.StudyContributor.user_id == 
user.id ).first() """Converts the study to a dictionary""" return { @@ -162,9 +163,10 @@ def to_dict(self): "updated_on": self.updated_on, "size": self.study_other.size if self.study_other else None, "description": self.study_description.brief_summary - if self.study_description else None, + if self.study_description + else None, "owner": owner_contributors.to_dict()["id"] if owner_contributors else None, - "role": contributor_permission.to_dict()["role"] + "role": contributor_permission.to_dict()["role"], } @staticmethod From 9702c798194d3497e13db40405e7acccf592ec1b Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 6 Oct 2023 16:43:26 -0700 Subject: [PATCH 236/505] feat: added user --- apis/__init__.py | 1 + apis/user.py | 26 ++++++++++++++++++++++++++ model/study.py | 3 ++- 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 apis/user.py diff --git a/apis/__init__.py b/apis/__init__.py index b7515847..bcf98176 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -82,3 +82,4 @@ def get(self): api.add_namespace(dataset_api) api.add_namespace(participants_api) api.add_namespace(contributors_api) +api.add_namespace(user) diff --git a/apis/user.py b/apis/user.py new file mode 100644 index 00000000..652c45e0 --- /dev/null +++ b/apis/user.py @@ -0,0 +1,26 @@ +from flask import request, g +from flask_restx import Namespace, Resource, fields + +from model import Study, db, User, StudyContributor +from .authentication import is_granted + +api = Namespace("User", description="User tables", path="/") + + +study_model = api.model( + "User", + { + + }, +) + + +@api.route("/profile") +class User(Resource): + @api.doc("list_study") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(study_model) + def get(self): + """this code returns user details""" + return User.query.all() diff --git a/model/study.py b/model/study.py index 543f8ba0..ae44d2d6 100644 --- a/model/study.py +++ b/model/study.py @@ -151,8 +151,9 @@ 
def to_dict(self): ).first() user = model.User.query.get(g.user.id) contributor_permission = self.study_contributors.filter( - model.StudyContributor.user_id == user.id + model.StudyContributor.user_id == g.user.id ).first() + print(contributor_permission) """Converts the study to a dictionary""" return { "id": self.id, From b6ebe71f78a1c35b12840feb6646769ecd5cd15c Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 6 Oct 2023 23:44:54 +0000 Subject: [PATCH 237/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/user.py | 4 +--- model/study.py | 8 +++++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/apis/user.py b/apis/user.py index 652c45e0..63c5149f 100644 --- a/apis/user.py +++ b/apis/user.py @@ -9,9 +9,7 @@ study_model = api.model( "User", - { - - }, + {}, ) diff --git a/model/study.py b/model/study.py index ae44d2d6..eb1b2fbc 100644 --- a/model/study.py +++ b/model/study.py @@ -6,6 +6,7 @@ import datetime from flask import g + class StudyException(Exception): pass @@ -151,7 +152,7 @@ def to_dict(self): ).first() user = model.User.query.get(g.user.id) contributor_permission = self.study_contributors.filter( - model.StudyContributor.user_id == g.user.id + model.StudyContributor.user_id == g.user.id ).first() print(contributor_permission) """Converts the study to a dictionary""" @@ -163,9 +164,10 @@ def to_dict(self): "updated_on": self.updated_on, "size": self.study_other.size if self.study_other else None, "description": self.study_description.brief_summary - if self.study_description else None, + if self.study_description + else None, "owner": owner_contributors.to_dict()["id"] if owner_contributors else None, - "role": contributor_permission.to_dict()["role"] + "role": contributor_permission.to_dict()["role"], } @staticmethod From 83de89f6333877cbc05516da680484806ae136ef Mon Sep 17 00:00:00 2001 
From: aydawka Date: Mon, 9 Oct 2023 10:17:28 -0700 Subject: [PATCH 238/505] fix: update contributor DELETE to not delete if 1 user is left --- apis/authentication.py | 28 ++++++------ apis/contributor.py | 87 +++++++++++++++++++++----------------- model/study_contributor.py | 2 +- 3 files changed, 63 insertions(+), 54 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index f40483f6..7077573d 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -7,7 +7,7 @@ import jwt import config import uuid - +import re api = Namespace("Authentication", description="Authentication paths", path="/") signup_model = api.model( @@ -41,25 +41,23 @@ def post(self): """signs up the new users and saves data in DB""" data = request.json # TODO data[email doesnt exist then raise error; json validation library - if not data["email_address"]: - raise "Email is not found" + pattern = r'^[\w\.-]+@[\w\.-]+\.\w+$' + if not data["email_address"] or not re.match(pattern, data["email_address"]): + return "Email address is invalid", 422 user = User.query.filter_by(email_address=data["email_address"]).one_or_none() if user: return "This email address is already in use", 409 - is_invited = StudyInvitedContributor.query.filter_by( + invitations = StudyInvitedContributor.query.filter_by( email_address=data["email_address"] - ).one_or_none() - user_add = User.from_data(data) - if is_invited: - study = Study.query.filter_by(id=is_invited.study_id).first() - contributor_add = study.add_invited_to_contributor( - user_add, is_invited.permission - ) - db.session.add(contributor_add) - db.session.delete(is_invited) - db.session.add(user_add) + ).all() + new_user = User.from_data(data) + for invite in invitations: + invite.study.add_user_to_study( + new_user, invite.permission) + db.session.delete(invite) + db.session.add(new_user) db.session.commit() - return f"Hi, {user_add.email_address}, you have successfully signed up", 201 + return f"Hi, {new_user.email_address}, you 
have successfully signed up", 201 @api.route("/auth/login") diff --git a/apis/contributor.py b/apis/contributor.py index e069eddd..c36052c6 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -77,6 +77,8 @@ class ContributorResource(Resource): def put(self, study_id: int, user_id: int): """update contributor based on the assigned permissions""" study = Study.query.get(study_id) + if not study: + return "study is not found", 404 if not is_granted("permission", study): return ( "Access denied, you are not authorized to change this permission", @@ -84,6 +86,8 @@ def put(self, study_id: int, user_id: int): ) data = request.json user = User.query.get(user_id) + if not user: + return "user not found", 404 permission = data["role"] grantee = StudyContributor.query.filter( StudyContributor.user == user, StudyContributor.study == study @@ -124,47 +128,55 @@ def put(self, study_id: int, user_id: int): @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: str): study = Study.query.get(study_id) - user = User.query.get(user_id) - if not is_granted("delete_contributor", study) and user != g.user: - return ( - "Access denied, you are not authorized to change this permission", - 403, - ) - contributors = [] + if not study: + return "study is not found", 404 + granter = StudyContributor.query.filter( + StudyContributor.user == g.user, StudyContributor.study == study + ).first() + if not granter: + return "you are not contributor of this study", 403 + grants = OrderedDict() + grants["viewer"] = [] + grants["editor"] = [] + grants["admin"] = ["viewer", "editor"] + grants["owner"] = ["editor", "viewer", "admin"] + if "@" in user_id: - invited_contributors = StudyInvitedContributor.query.filter_by( + invited_grantee = StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() - db.session.delete(invited_contributors) - contributors.append(invited_contributors) - else: - user = User.query.get(user_id) - grantee = 
StudyContributor.query.filter( - StudyContributor.user == user, StudyContributor.study == study - ).first() - contributors.append(grantee) - invited_contributors = StudyInvitedContributor.query.filter_by( - study_id=study_id, email_address=user_id - ).first() - granter = StudyContributor.query.filter( - StudyContributor.user == g.user, StudyContributor.study == study - ).first() - # Order should go from the least privileged to the most privileged - grants = OrderedDict() - grants["viewer"] = [] - grants["editor"] = ["viewer"] - grants["admin"] = ["viewer", "editor"] - grants["owner"] = ["editor", "viewer", "admin"] - # Granter can not downgrade anyone of equal or greater permissions other than themselves - if user != g.user: - grantee_level = list(grants.keys()).index(grantee.permission) # 2 - granter_level = list(grants.keys()).index(granter.permission) # 2 - if granter_level <= grantee_level: - return ( - f"You are not authorized to delete {grantee.permission}s from study", - 403, - ) + can_delete = invited_grantee.permission in grants[granter.permission] + if not can_delete: + return f"User cannot delete {invited_grantee.permission}", 403 + db.session.delete(invited_grantee) + db.session.commit() + return 204 + user = User.query.get(user_id) + if not user: + return "user is not found", 404 + contributors = StudyContributor.query.filter( + StudyContributor.study == study + ).all() + print(len(contributors), "") + grantee = StudyContributor.query.filter( + StudyContributor.user == user, StudyContributor.study == study + ).first() + if len(contributors) <= 1: + return "the study must have at least one contributor", 422 + if grantee.user == granter.user: + if granter.permission == "owner": + return "you must transfer ownership before removing yourself", 422 db.session.delete(grantee) + db.session.commit() + return 204 + if not is_granted("delete_contributor", study): + return ( + "Access denied, you are not authorized to change this permission", + 403) + can_delete 
= grantee.permission in grants[granter.permission] + if not can_delete: + return f"User cannot delete {grantee.permission}", 403 + db.session.delete(grantee) db.session.commit() return 204 @@ -188,7 +200,6 @@ def put(self, study_id: int, user_id: int): StudyContributor.user == user, StudyContributor.study == study, ).first() - existing_contributor.permission = "owner" existing_owner = StudyContributor.query.filter( StudyContributor.study == study, StudyContributor.permission == "owner" diff --git a/model/study_contributor.py b/model/study_contributor.py index 23ff3e91..055a6bfc 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -14,7 +14,7 @@ def __init__(self, study, user, permission): __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), primary_key=True) - created_at = db.Column(db.CHAR(36), nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) user = db.relationship( "User", From 7d1e3afc6cbef3644cbcacd2a461b3a9e7814a21 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 10:19:16 -0700 Subject: [PATCH 239/505] feat: added error handler hook --- apis/exception.py | 2 ++ app.py | 6 ++++++ model/study.py | 16 ++++++++-------- 3 files changed, 16 insertions(+), 8 deletions(-) create mode 100644 apis/exception.py diff --git a/apis/exception.py b/apis/exception.py new file mode 100644 index 00000000..2e3be022 --- /dev/null +++ b/apis/exception.py @@ -0,0 +1,2 @@ +class ValidationException(Exception): + pass diff --git a/app.py b/app.py index f163d740..611aecae 100644 --- a/app.py +++ b/app.py @@ -1,4 +1,5 @@ """Entry point for the application.""" +from apis.exception import ValidationException from flask import Flask, request, make_response, g import jwt import config @@ -163,6 +164,11 @@ def on_after_request(resp): return resp + @app.errorhandler(ValidationException) + def validation_exception_handler(error): + return 
error.args[0], 422 + + @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() diff --git a/model/study.py b/model/study.py index ae44d2d6..27c486b8 100644 --- a/model/study.py +++ b/model/study.py @@ -5,7 +5,7 @@ from .db import db import datetime from flask import g - +from apis import exception class StudyException(Exception): pass @@ -146,7 +146,7 @@ def __init__(self): ) def to_dict(self): - owner_contributors = self.study_contributors.filter( + owner = self.study_contributors.filter( model.StudyContributor.permission == "owner" ).first() user = model.User.query.get(g.user.id) @@ -164,7 +164,7 @@ def to_dict(self): "size": self.study_other.size if self.study_other else None, "description": self.study_description.brief_summary if self.study_description else None, - "owner": owner_contributors.to_dict()["id"] if owner_contributors else None, + "owner": owner.to_dict()["id"] if owner else None, "role": contributor_permission.to_dict()["role"] } @@ -178,6 +178,11 @@ def from_data(data: dict): def update(self, data): """Updates the study from a dictionary""" + if not data["title"]: + raise exception.ValidationException("title is required") + if not data["image"]: + raise exception.ValidationException("image is required") + self.title = data["title"] self.image = data["image"] self.updated_on = datetime.datetime.now(timezone.utc).timestamp() @@ -221,8 +226,3 @@ def invite_user_to_study(self, email_address, permission): db.session.add(contributor_add) return contributor_add - def add_invited_to_contributor(self, user, permission): - """add invited users to contributor""" - contributor = model.StudyContributor(self, user, permission) - db.session.add(contributor) - return contributor From f73ba15d904d6efa5127c843dc62c0f027ac0758 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 9 Oct 2023 17:20:48 +0000 Subject: [PATCH 240/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 6 +++--- apis/contributor.py | 3 ++- app.py | 1 - model/study.py | 10 ++++++---- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 7077573d..ff7b437c 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -8,6 +8,7 @@ import config import uuid import re + api = Namespace("Authentication", description="Authentication paths", path="/") signup_model = api.model( @@ -41,7 +42,7 @@ def post(self): """signs up the new users and saves data in DB""" data = request.json # TODO data[email doesnt exist then raise error; json validation library - pattern = r'^[\w\.-]+@[\w\.-]+\.\w+$' + pattern = r"^[\w\.-]+@[\w\.-]+\.\w+$" if not data["email_address"] or not re.match(pattern, data["email_address"]): return "Email address is invalid", 422 user = User.query.filter_by(email_address=data["email_address"]).one_or_none() @@ -52,8 +53,7 @@ def post(self): ).all() new_user = User.from_data(data) for invite in invitations: - invite.study.add_user_to_study( - new_user, invite.permission) + invite.study.add_user_to_study(new_user, invite.permission) db.session.delete(invite) db.session.add(new_user) db.session.commit() diff --git a/apis/contributor.py b/apis/contributor.py index c36052c6..ec463638 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -172,7 +172,8 @@ def delete(self, study_id: int, user_id: str): if not is_granted("delete_contributor", study): return ( "Access denied, you are not authorized to change this permission", - 403) + 403, + ) can_delete = grantee.permission in grants[granter.permission] if not can_delete: return f"User cannot delete {grantee.permission}", 403 diff --git a/app.py b/app.py index 611aecae..8bc7d139 100644 --- a/app.py +++ b/app.py @@ -168,7 +168,6 @@ def on_after_request(resp): def validation_exception_handler(error): return error.args[0], 422 
- @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() diff --git a/model/study.py b/model/study.py index 27c486b8..cc28a03a 100644 --- a/model/study.py +++ b/model/study.py @@ -6,6 +6,8 @@ import datetime from flask import g from apis import exception + + class StudyException(Exception): pass @@ -151,7 +153,7 @@ def to_dict(self): ).first() user = model.User.query.get(g.user.id) contributor_permission = self.study_contributors.filter( - model.StudyContributor.user_id == g.user.id + model.StudyContributor.user_id == g.user.id ).first() print(contributor_permission) """Converts the study to a dictionary""" @@ -163,9 +165,10 @@ def to_dict(self): "updated_on": self.updated_on, "size": self.study_other.size if self.study_other else None, "description": self.study_description.brief_summary - if self.study_description else None, + if self.study_description + else None, "owner": owner.to_dict()["id"] if owner else None, - "role": contributor_permission.to_dict()["role"] + "role": contributor_permission.to_dict()["role"], } @staticmethod @@ -225,4 +228,3 @@ def invite_user_to_study(self, email_address, permission): ) db.session.add(contributor_add) return contributor_add - From c5c1996a74f1545cfd1a0ba39fbd9d257d1679a5 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 11:13:14 -0700 Subject: [PATCH 241/505] feat: added alembic migrations --- .../32e5ff331a78_add_token_blacklist.py | 26 +++++++++++++++++++ .../639a13561089_delete_token_blacklist.py | 26 +++++++++++++++++++ ...d33834_remove_column_in_token_blacklist.py | 26 +++++++++++++++++++ .../versions/e6cc254fc968_token_blacklist.py | 26 +++++++++++++++++++ 4 files changed, 104 insertions(+) create mode 100644 alembic/versions/32e5ff331a78_add_token_blacklist.py create mode 100644 alembic/versions/639a13561089_delete_token_blacklist.py create mode 100644 alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py create mode 100644 
alembic/versions/e6cc254fc968_token_blacklist.py diff --git a/alembic/versions/32e5ff331a78_add_token_blacklist.py b/alembic/versions/32e5ff331a78_add_token_blacklist.py new file mode 100644 index 00000000..f2774f1c --- /dev/null +++ b/alembic/versions/32e5ff331a78_add_token_blacklist.py @@ -0,0 +1,26 @@ +"""add token_blacklist + +Revision ID: 32e5ff331a78 +Revises: 639a13561089 +Create Date: 2023-10-09 11:10:06.568148 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '32e5ff331a78' +down_revision: Union[str, None] = '639a13561089' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade(): + op.add_column('token_blacklist', sa.Column('user_id', sa.String, nullable=True)) + + +def downgrade() -> None: + pass diff --git a/alembic/versions/639a13561089_delete_token_blacklist.py b/alembic/versions/639a13561089_delete_token_blacklist.py new file mode 100644 index 00000000..b74a58a0 --- /dev/null +++ b/alembic/versions/639a13561089_delete_token_blacklist.py @@ -0,0 +1,26 @@ +"""delete token_blacklist + +Revision ID: 639a13561089 +Revises: e6cc254fc968 +Create Date: 2023-10-08 23:14:48.882104 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '639a13561089' +down_revision: Union[str, None] = 'e6cc254fc968' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.drop_column('token_blacklist', 'user_id') + + +def downgrade() -> None: + pass diff --git a/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py b/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py new file mode 100644 index 00000000..fa13ff9a --- /dev/null +++ b/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py @@ -0,0 +1,26 @@ +"""remove column in token_blacklist + +Revision ID: 6d4271d33834 +Revises: 32e5ff331a78 +Create Date: 2023-10-09 11:11:58.478289 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '6d4271d33834' +down_revision: Union[str, None] = '32e5ff331a78' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.drop_column('token_blacklist', 'user_id') + + +def downgrade() -> None: + pass diff --git a/alembic/versions/e6cc254fc968_token_blacklist.py b/alembic/versions/e6cc254fc968_token_blacklist.py new file mode 100644 index 00000000..975c972b --- /dev/null +++ b/alembic/versions/e6cc254fc968_token_blacklist.py @@ -0,0 +1,26 @@ +"""token_blacklist + +Revision ID: e6cc254fc968 +Revises: 3e48c46694c8 +Create Date: 2023-10-06 19:40:38.517323 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = 'e6cc254fc968' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade(): + op.add_column('token_blacklist', sa.Column('user_id', sa.String, nullable=True)) + + +def downgrade() -> None: + pass From 3a79c59567141fe238448fac344331bd18355efa Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 9 Oct 2023 18:14:10 +0000 Subject: [PATCH 242/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- alembic/versions/32e5ff331a78_add_token_blacklist.py | 6 +++--- alembic/versions/639a13561089_delete_token_blacklist.py | 6 +++--- .../6d4271d33834_remove_column_in_token_blacklist.py | 6 +++--- alembic/versions/e6cc254fc968_token_blacklist.py | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/alembic/versions/32e5ff331a78_add_token_blacklist.py b/alembic/versions/32e5ff331a78_add_token_blacklist.py index f2774f1c..0ff84efd 100644 --- a/alembic/versions/32e5ff331a78_add_token_blacklist.py +++ b/alembic/versions/32e5ff331a78_add_token_blacklist.py @@ -12,14 +12,14 @@ # revision identifiers, used by Alembic. 
-revision: str = '32e5ff331a78' -down_revision: Union[str, None] = '639a13561089' +revision: str = "32e5ff331a78" +down_revision: Union[str, None] = "639a13561089" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade(): - op.add_column('token_blacklist', sa.Column('user_id', sa.String, nullable=True)) + op.add_column("token_blacklist", sa.Column("user_id", sa.String, nullable=True)) def downgrade() -> None: diff --git a/alembic/versions/639a13561089_delete_token_blacklist.py b/alembic/versions/639a13561089_delete_token_blacklist.py index b74a58a0..e964c05b 100644 --- a/alembic/versions/639a13561089_delete_token_blacklist.py +++ b/alembic/versions/639a13561089_delete_token_blacklist.py @@ -12,14 +12,14 @@ # revision identifiers, used by Alembic. -revision: str = '639a13561089' -down_revision: Union[str, None] = 'e6cc254fc968' +revision: str = "639a13561089" +down_revision: Union[str, None] = "e6cc254fc968" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: - op.drop_column('token_blacklist', 'user_id') + op.drop_column("token_blacklist", "user_id") def downgrade() -> None: diff --git a/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py b/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py index fa13ff9a..f783a7b9 100644 --- a/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py +++ b/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py @@ -12,14 +12,14 @@ # revision identifiers, used by Alembic. 
-revision: str = '6d4271d33834' -down_revision: Union[str, None] = '32e5ff331a78' +revision: str = "6d4271d33834" +down_revision: Union[str, None] = "32e5ff331a78" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: - op.drop_column('token_blacklist', 'user_id') + op.drop_column("token_blacklist", "user_id") def downgrade() -> None: diff --git a/alembic/versions/e6cc254fc968_token_blacklist.py b/alembic/versions/e6cc254fc968_token_blacklist.py index 975c972b..4fa8dcd1 100644 --- a/alembic/versions/e6cc254fc968_token_blacklist.py +++ b/alembic/versions/e6cc254fc968_token_blacklist.py @@ -12,14 +12,14 @@ # revision identifiers, used by Alembic. -revision: str = 'e6cc254fc968' +revision: str = "e6cc254fc968" down_revision: Union[str, None] = None branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None def upgrade(): - op.add_column('token_blacklist', sa.Column('user_id', sa.String, nullable=True)) + op.add_column("token_blacklist", sa.Column("user_id", sa.String, nullable=True)) def downgrade() -> None: From 4fb565d43f7bf8baabc5c0bb2b04594def99e271 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 12:07:01 -0700 Subject: [PATCH 243/505] feat: added Alembic README --- alembic/README.md | 35 +++++++++++++++++++++++++++++++++++ alembic/script.py.mako | 26 ++++++++++++++++++++++++++ 2 files changed, 61 insertions(+) create mode 100644 alembic/README.md create mode 100644 alembic/script.py.mako diff --git a/alembic/README.md b/alembic/README.md new file mode 100644 index 00000000..d6f6d40a --- /dev/null +++ b/alembic/README.md @@ -0,0 +1,35 @@ +### Setup Alembic + + +Alembic leverages SQLAlchemy as its underlying engine to facilitate the creation, +management, and execution of change management scripts for relational databases. +To set up Alembic for migration management, please follow the instructions below. 
+ + +1.Install alembic + ``` + pip install Flask-Alembic + ``` + +2.Create new revision +``` +alembic revision -m "create account table" +``` + +3.Make a revision in the ```create account table``` file + +``` +def upgrade(): + pass +``` + +4.After all revisions completed, run the migration + +``` +alembic upgrade head +``` + +5.Running our Second Migration +``` +alembic revision -m "Add a column" +``` diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 00000000..fbc4b07d --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} From d4a20774601417571f0d48c237a62a26821c8123 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 13:22:30 -0700 Subject: [PATCH 244/505] fix: added Alembic README --- alembic/README.md | 62 +++++++++++++++++++++++++++++------------------ 1 file changed, 38 insertions(+), 24 deletions(-) diff --git a/alembic/README.md b/alembic/README.md index d6f6d40a..02bd813b 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -1,35 +1,49 @@ ### Setup Alembic +To set up Alembic migration management, please follow the instructions below. -Alembic leverages SQLAlchemy as its underlying engine to facilitate the creation, -management, and execution of change management scripts for relational databases. 
-To set up Alembic for migration management, please follow the instructions below. - - -1.Install alembic - ``` - pip install Flask-Alembic - ``` - -2.Create new revision +1.Create new revision ``` alembic revision -m "create account table" ``` -3.Make a revision in the ```create account table``` file - -``` -def upgrade(): - pass -``` - -4.After all revisions completed, run the migration +This will set a new directory ```alembic``` containing ``` versions``` +which contains all the revised files. +Then, a new file ```1975ea83b712_create_account_table.py``` is generated. + + +2.After creating new file we can include all needed revisions in this file + + create account table + Revision ID: 1975ea83b712 + Revises: + Create Date: 2011-11-08 11:40:27.089406 + + # revision identifiers, used by Alembic. + revision = '1975ea83b712' + down_revision = None + branch_labels = None + + from alembic import op + import sqlalchemy as sa + + def upgrade(): + pass + + +* ```down_revision``` runs an operation, and composes a list based on +how the down_revision identifiers link together with the down_revision of None representing the first file. + +* All information needed to be revised should be included in + ``` upgrade ``` function. + +5.After all revisions completed, run the migration ``` -alembic upgrade head +$ alembic upgrade head +INFO [alembic.context] Context class PostgresqlContext. +INFO [alembic.context] Will assume transactional DDL. +INFO [alembic.context] Running upgrade None -> 1975ea83b712 ``` -5.Running our Second Migration -``` -alembic revision -m "Add a column" -``` +6.All saved changes should be updated or removed by running new revision following steps mentioned above. 
From d34fb18602cccba206a64da12cac94298f7b9cc6 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 13:23:27 -0700 Subject: [PATCH 245/505] feat: added Alembic README --- alembic/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/alembic/README.md b/alembic/README.md index 02bd813b..7645f95a 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -37,7 +37,7 @@ how the down_revision identifiers link together with the down_revision of None r * All information needed to be revised should be included in ``` upgrade ``` function. -5.After all revisions completed, run the migration +3.After all revisions completed, run the migration ``` $ alembic upgrade head @@ -46,4 +46,4 @@ INFO [alembic.context] Will assume transactional DDL. INFO [alembic.context] Running upgrade None -> 1975ea83b712 ``` -6.All saved changes should be updated or removed by running new revision following steps mentioned above. +4.All saved changes should be updated or removed by running new revision following steps mentioned above. From e317a7aa473f261749071ac49ae65deeb71f77f2 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 13:26:54 -0700 Subject: [PATCH 246/505] feat: added Alembic README --- alembic/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alembic/README.md b/alembic/README.md index 7645f95a..3a3f2d0d 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -32,7 +32,7 @@ Then, a new file ```1975ea83b712_create_account_table.py``` is generated. * ```down_revision``` runs an operation, and composes a list based on -how the down_revision identifiers link together with the down_revision of None representing the first file. +how the ```down_revision``` identifiers link together with the ```down_revision``` of ```None``` representing the first file. * All information needed to be revised should be included in ``` upgrade ``` function. 
From 63c8edb204775228456f89a3f7ecc0d3df75fc17 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 13:35:25 -0700 Subject: [PATCH 247/505] feat: minor syntax errors in readme --- alembic/README.md | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/alembic/README.md b/alembic/README.md index 3a3f2d0d..833c4058 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -1,19 +1,20 @@ -### Setup Alembic +# Setup Alembic To set up Alembic migration management, please follow the instructions below. -1.Create new revision -``` -alembic revision -m "create account table" -``` +1. Create new revision + ``` bash + alembic revision -m "create account table" + ``` -This will set a new directory ```alembic``` containing ``` versions``` -which contains all the revised files. -Then, a new file ```1975ea83b712_create_account_table.py``` is generated. + This will set a new directory alembic containing ` versions` + which contains all the revised files. + Then, a new file 1975ea83b712_create_account_table.py is generated. -2.After creating new file we can include all needed revisions in this file +2. After creating new file we can include all needed revisions in this file + ``` bash create account table Revision ID: 1975ea83b712 Revises: @@ -29,21 +30,20 @@ Then, a new file ```1975ea83b712_create_account_table.py``` is generated. def upgrade(): pass + ``` - -* ```down_revision``` runs an operation, and composes a list based on -how the ```down_revision``` identifiers link together with the ```down_revision``` of ```None``` representing the first file. +* `down_revision` runs an operation, and composes a list based on +how the `down_revision` identifiers link together with the `down_revision` of `None` representing the first file. -* All information needed to be revised should be included in - ``` upgrade ``` function. +* All information needed to be revised should be included in ` upgrade ` function. 
-3.After all revisions completed, run the migration +3. After all revisions completed, run the migration -``` -$ alembic upgrade head -INFO [alembic.context] Context class PostgresqlContext. -INFO [alembic.context] Will assume transactional DDL. -INFO [alembic.context] Running upgrade None -> 1975ea83b712 -``` + ``` bash + alembic upgrade head + INFO [alembic.context] Context class PostgresqlContext. + INFO [alembic.context] Will assume transactional DDL. + INFO [alembic.context] Running upgrade None -> 1975ea83b712 + ``` -4.All saved changes should be updated or removed by running new revision following steps mentioned above. +4. All saved changes should be updated or removed by running new revision following steps mentioned above. From 40cbacc588bdbeffe4216b9beb1ba63599e00d92 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 9 Oct 2023 15:13:36 -0700 Subject: [PATCH 248/505] feat: added autogenerate in migrations to update DB from all sources --- alembic/README.md | 12 ++++++++---- apis/contributor.py | 5 ----- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/alembic/README.md b/alembic/README.md index 833c4058..f31fa385 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -2,13 +2,17 @@ To set up Alembic migration management, please follow the instructions below. -1. Create new revision +1. Initiate new `alembic` directory ``` bash - alembic revision -m "create account table" + alembic init alembic + ``` + +2. Create new revision + ``` bash + alembic revision --autogenerate -m "create account table" ``` - This will set a new directory alembic containing ` versions` - which contains all the revised files. + This will set a new directory, namely ` versions` which contains all the revised files, in the alembic directory. Then, a new file 1975ea83b712_create_account_table.py is generated. 
diff --git a/apis/contributor.py b/apis/contributor.py index ec463638..d266863c 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -30,11 +30,6 @@ class AddContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def get(self, study_id: int): - # study_contributors = ( - # StudyContributor.query - # .filter(StudyContributor.user_id == g.user.id) # Filter contributors where user_id matches the user's id - # .all() - # ) contributors = StudyContributor.query.filter_by(study_id=study_id).all() invited_contributors = StudyInvitedContributor.query.filter_by( study_id=study_id From d106ac7e873bacf48ca24eddcd77b9a27d4053ee Mon Sep 17 00:00:00 2001 From: slugb0t Date: Mon, 9 Oct 2023 16:16:01 -0700 Subject: [PATCH 249/505] feat: get user details endpoint --- apis/authentication.py | 3 +++ apis/user.py | 25 +++++++++++++++++++++---- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index ff7b437c..88534590 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -102,6 +102,9 @@ def authentication(): """it authenticates users to a study, sets access and refresh token. 
In addition, it handles error handling of expired token and non existed users""" g.user = None + print(request) + print(request.cookies) + print(request.cookies.get("token")) if "token" not in request.cookies: return token = request.cookies.get("token") diff --git a/apis/user.py b/apis/user.py index 63c5149f..3e26362b 100644 --- a/apis/user.py +++ b/apis/user.py @@ -9,16 +9,33 @@ study_model = api.model( "User", - {}, + { + "first_name": fields.String(required=True, default=""), + "last_name": fields.String(required=True, default=""), + "institution": fields.String(required=True, default=""), + "orcid": fields.String(required=True, default=""), + "location": fields.String(required=True, default=""), + "timezone": fields.String(required=True, default=""), + }, ) @api.route("/profile") -class User(Resource): +class UserDetails(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_model) def get(self): - """this code returns user details""" - return User.query.all() + """This code returns user details""" + user = User.query.get(g.user.id) + return user.to_dict() + + @api.expect(study_model) + def put(self): + """This code updates user details""" + data = request.json + user = User.query.get(g.user.id) + user.update(data) + db.session.commit() + return user.to_dict() \ No newline at end of file From 22f53a10e362efdb85403a3f41cbae95c63d689d Mon Sep 17 00:00:00 2001 From: slugb0t Date: Mon, 9 Oct 2023 17:18:24 -0700 Subject: [PATCH 250/505] feat: put endpoint for updating user details --- apis/user.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/apis/user.py b/apis/user.py index 3e26362b..e0bedc87 100644 --- a/apis/user.py +++ b/apis/user.py @@ -1,7 +1,7 @@ from flask import request, g from flask_restx import Namespace, Resource, fields -from model import Study, db, User, StudyContributor +from model import Study, db, User, StudyContributor, UserDetails from 
.authentication import is_granted api = Namespace("User", description="User tables", path="/") @@ -21,21 +21,23 @@ @api.route("/profile") -class UserDetails(Resource): +class UserDetailsEndpoint(Resource): @api.doc("list_study") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_model) def get(self): - """This code returns user details""" + """Returns user details""" user = User.query.get(g.user.id) - return user.to_dict() + user_details = user.user_details + return user_details.to_dict() @api.expect(study_model) def put(self): - """This code updates user details""" + """Updates user details""" data = request.json user = User.query.get(g.user.id) - user.update(data) + user_details = user.user_details + user_details.update(data) db.session.commit() - return user.to_dict() \ No newline at end of file + return user_details.to_dict() \ No newline at end of file From a34621af5fa5f7010b71ea902912dff44fc29521 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 10 Oct 2023 00:19:20 +0000 Subject: [PATCH 251/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/user.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apis/user.py b/apis/user.py index e0bedc87..a149a763 100644 --- a/apis/user.py +++ b/apis/user.py @@ -31,7 +31,7 @@ def get(self): user = User.query.get(g.user.id) user_details = user.user_details return user_details.to_dict() - + @api.expect(study_model) def put(self): """Updates user details""" @@ -40,4 +40,4 @@ def put(self): user_details = user.user_details user_details.update(data) db.session.commit() - return user_details.to_dict() \ No newline at end of file + return user_details.to_dict() From cd78c14ab088e40d217525ae7456959087fb6940 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 10 Oct 2023 07:56:45 -0700 Subject: [PATCH 252/505] fix: updated 
readme --- alembic/README.md | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/alembic/README.md b/alembic/README.md index f31fa385..833c4058 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -2,17 +2,13 @@ To set up Alembic migration management, please follow the instructions below. -1. Initiate new `alembic` directory +1. Create new revision ``` bash - alembic init alembic - ``` - -2. Create new revision - ``` bash - alembic revision --autogenerate -m "create account table" + alembic revision -m "create account table" ``` - This will set a new directory, namely ` versions` which contains all the revised files, in the alembic directory. + This will set a new directory alembic containing ` versions` + which contains all the revised files. Then, a new file 1975ea83b712_create_account_table.py is generated. From 0fa2f5fbf5634f9595afb71175da0b690784d073 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 10 Oct 2023 11:23:23 -0700 Subject: [PATCH 253/505] fix: updated readme --- alembic/README.md | 3 ++ apis/contributor.py | 81 +++++++++++++++++++++------------------------ model/dataset.py | 2 +- 3 files changed, 42 insertions(+), 44 deletions(-) diff --git a/alembic/README.md b/alembic/README.md index 833c4058..0bf0a79d 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -47,3 +47,6 @@ how the `down_revision` identifiers link together with the `down_revision` of `N ``` 4. All saved changes should be updated or removed by running new revision following steps mentioned above. 
+ + +You can refer to [Alembic official documentation](https://alembic.sqlalchemy.org/en/latest/) page for more details \ No newline at end of file diff --git a/apis/contributor.py b/apis/contributor.py index d266863c..b43b82a1 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,17 +1,12 @@ from collections import OrderedDict +import model from flask_restx import Namespace, Resource, fields from flask import request, g -from model import ( - Study, - db, - User, - StudyException, - StudyContributor, - StudyInvitedContributor, -) +import model from .authentication import is_granted + api = Namespace("Contributor", description="Contributors", path="/") @@ -30,8 +25,8 @@ class AddContributor(Resource): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def get(self, study_id: int): - contributors = StudyContributor.query.filter_by(study_id=study_id).all() - invited_contributors = StudyInvitedContributor.query.filter_by( + contributors = model.StudyContributor.query.filter_by(study_id=study_id).all() + invited_contributors = model.StudyInvitedContributor.query.filter_by( study_id=study_id ).all() @@ -44,12 +39,12 @@ def get(self, study_id: int): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify", 403 data = request.json email_address = data["email_address"] - user = User.query.filter_by(email_address=email_address).first() + user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] contributor_ = None try: @@ -57,9 +52,9 @@ def post(self, study_id: int): contributor_ = study_obj.add_user_to_study(user, permission) else: contributor_ = study_obj.invite_user_to_study(email_address, permission) - except StudyException as ex: + except model.StudyException as 
ex: return ex.args[0], 409 - db.session.commit() + model.db.session.commit() return contributor_.to_dict(), 201 @@ -71,7 +66,7 @@ class ContributorResource(Resource): @api.expect(contributors_model) def put(self, study_id: int, user_id: int): """update contributor based on the assigned permissions""" - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not study: return "study is not found", 404 if not is_granted("permission", study): @@ -80,16 +75,16 @@ def put(self, study_id: int, user_id: int): 403, ) data = request.json - user = User.query.get(user_id) + user = model.User.query.get(user_id) if not user: return "user not found", 404 permission = data["role"] - grantee = StudyContributor.query.filter( - StudyContributor.user == user, StudyContributor.study == study + grantee = model.StudyContributor.query.filter( + model.StudyContributor.user == user, model.StudyContributor.study == study ).first() - granter = StudyContributor.query.filter( - StudyContributor.user == g.user, StudyContributor.study == study + granter = model.StudyContributor.query.filter( + model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() # Order should go from the least privileged to the most privileged @@ -122,11 +117,11 @@ def put(self, study_id: int, user_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: str): - study = Study.query.get(study_id) + study =model.Study.query.get(study_id) if not study: return "study is not found", 404 - granter = StudyContributor.query.filter( - StudyContributor.user == g.user, StudyContributor.study == study + granter = model.StudyContributor.query.filter( + model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() if not granter: return "you are not contributor of this study", 403 @@ -137,32 +132,32 @@ def delete(self, study_id: int, user_id: str): grants["owner"] = ["editor", "viewer", "admin"] if 
"@" in user_id: - invited_grantee = StudyInvitedContributor.query.filter_by( + invited_grantee = model.StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() can_delete = invited_grantee.permission in grants[granter.permission] if not can_delete: return f"User cannot delete {invited_grantee.permission}", 403 - db.session.delete(invited_grantee) - db.session.commit() + model.db.session.delete(invited_grantee) + model.db.session.commit() return 204 - user = User.query.get(user_id) + user = model.User.query.get(user_id) if not user: return "user is not found", 404 - contributors = StudyContributor.query.filter( - StudyContributor.study == study + contributors = model.StudyContributor.query.filter( + model.StudyContributor.study == study ).all() print(len(contributors), "") - grantee = StudyContributor.query.filter( - StudyContributor.user == user, StudyContributor.study == study + grantee = model.StudyContributor.query.filter( + model.StudyContributor.user == user, model.StudyContributor.study == study ).first() if len(contributors) <= 1: return "the study must have at least one contributor", 422 if grantee.user == granter.user: if granter.permission == "owner": return "you must transfer ownership before removing yourself", 422 - db.session.delete(grantee) - db.session.commit() + model.db.session.delete(grantee) + model.db.session.commit() return 204 if not is_granted("delete_contributor", study): return ( @@ -172,8 +167,8 @@ def delete(self, study_id: int, user_id: str): can_delete = grantee.permission in grants[granter.permission] if not can_delete: return f"User cannot delete {grantee.permission}", 403 - db.session.delete(grantee) - db.session.commit() + model.db.session.delete(grantee) + model.db.session.commit() return 204 @@ -185,22 +180,22 @@ class AssignOwner(Resource): @api.expect(contributors_model) def put(self, study_id: int, user_id: int): """set owner based on the assigned permissions""" - study = 
Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("make_owner", study): return ( "Access denied, you are not authorized to change this permission", 403, ) - user = User.query.get(user_id) - existing_contributor = StudyContributor.query.filter( - StudyContributor.user == user, - StudyContributor.study == study, + user = model.User.query.get(user_id) + existing_contributor = model.StudyContributor.query.filter( + model.StudyContributor.user == user, + model.StudyContributor.study == study, ).first() existing_contributor.permission = "owner" - existing_owner = StudyContributor.query.filter( - StudyContributor.study == study, StudyContributor.permission == "owner" + existing_owner = model.StudyContributor.query.filter( + model.StudyContributor.study == study, model.StudyContributor.permission == "owner" ).first() existing_owner.permission = "admin" - db.session.commit() + model.db.session.commit() return 204 diff --git a/model/dataset.py b/model/dataset.py index 91accda4..37f0f844 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -107,7 +107,7 @@ def last_modified(self): return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() @staticmethod - def from_data(study, data: dict): + def from_data(study: model.Study, data: dict): dataset_obj = Dataset(study) dataset_obj.update(data) return dataset_obj From 7f5b7e4baf87a5a1e9edbaa87c70b91639ee9f64 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 10 Oct 2023 18:24:04 +0000 Subject: [PATCH 254/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index b43b82a1..6d479a49 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -117,7 +117,7 @@ def put(self, study_id: int, user_id: int): 
@api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: str): - study =model.Study.query.get(study_id) + study = model.Study.query.get(study_id) if not study: return "study is not found", 404 granter = model.StudyContributor.query.filter( @@ -193,7 +193,8 @@ def put(self, study_id: int, user_id: int): ).first() existing_contributor.permission = "owner" existing_owner = model.StudyContributor.query.filter( - model.StudyContributor.study == study, model.StudyContributor.permission == "owner" + model.StudyContributor.study == study, + model.StudyContributor.permission == "owner", ).first() existing_owner.permission = "admin" From 00950e290a73a9c58a9e2869083a027144a78eef Mon Sep 17 00:00:00 2001 From: slugb0t Date: Tue, 10 Oct 2023 12:07:11 -0700 Subject: [PATCH 255/505] feat: user details endpoint combines user and user_details table --- apis/user.py | 17 +++++++++++++---- model/user.py | 2 +- model/user_details.py | 4 ++++ 3 files changed, 18 insertions(+), 5 deletions(-) diff --git a/apis/user.py b/apis/user.py index a149a763..61e50361 100644 --- a/apis/user.py +++ b/apis/user.py @@ -1,8 +1,7 @@ from flask import request, g from flask_restx import Namespace, Resource, fields -from model import Study, db, User, StudyContributor, UserDetails -from .authentication import is_granted +from model import User, db api = Namespace("User", description="User tables", path="/") @@ -10,17 +9,20 @@ study_model = api.model( "User", { + "email_address": fields.String(required=True, default=""), + "username": fields.String(required=True, default=""), "first_name": fields.String(required=True, default=""), "last_name": fields.String(required=True, default=""), "institution": fields.String(required=True, default=""), "orcid": fields.String(required=True, default=""), "location": fields.String(required=True, default=""), "timezone": fields.String(required=True, default=""), + "profile_image": fields.String(required=False, 
default=""), }, ) -@api.route("/profile") +@api.route("/user/profile") class UserDetailsEndpoint(Resource): @api.doc("list_study") @api.response(200, "Success") @@ -30,13 +32,20 @@ def get(self): """Returns user details""" user = User.query.get(g.user.id) user_details = user.user_details - return user_details.to_dict() + user_information = user.to_dict() + user_information.update(user_details.to_dict()) + return user_information @api.expect(study_model) def put(self): """Updates user details""" data = request.json + # verify data follows study_model schema except for profile_image + if data is None: + return {"message": "No data provided"}, 400 user = User.query.get(g.user.id) + # update email and username in user table and other fields in user_details table + user.update(data) user_details = user.user_details user_details.update(data) db.session.commit() diff --git a/model/user.py b/model/user.py index e98b69ab..f171a62b 100644 --- a/model/user.py +++ b/model/user.py @@ -43,7 +43,7 @@ def from_data(data: dict): def update(self, data): self.email_address = data["email_address"] - self.username = data["email_address"] + self.username = data["username"] # self.email_verified = data["email_verified"] # self.username = data["username"] # self.hash = data["hash"] diff --git a/model/user_details.py b/model/user_details.py index 280fed21..e3ba685c 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -11,6 +11,7 @@ def __init__(self, user): self.location = "" self.timezone = "" self.orcid = "" + self.profile_image = "" self.user = user __tablename__ = "user_details" @@ -20,6 +21,7 @@ def __init__(self, user): institution = db.Column(db.String, nullable=True) orcid = db.Column(db.String, nullable=True) location = db.Column(db.String, nullable=True) + profile_image = db.Column(db.String, nullable=True) timezone = db.Column(db.String, nullable=True) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) @@ -34,6 +36,7 @@ def to_dict(self): 
"orcid": self.orcid, "location": self.location, "timezone": self.timezone, + "profile_image": self.profile_image, } @staticmethod @@ -49,3 +52,4 @@ def update(self, data): self.orcid = data["orcid"] self.location = data["location"] self.timezone = data["timezone"] + self.profile_image = data["profile_image"] if "profile_image" in data else "" From 00c37a8ccaae20999fdf791c57b3eebab94be976 Mon Sep 17 00:00:00 2001 From: slugb0t Date: Tue, 10 Oct 2023 12:11:57 -0700 Subject: [PATCH 256/505] refactor: add marshal_with and success model ex for get --- apis/user.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/apis/user.py b/apis/user.py index 61e50361..b2d4e57a 100644 --- a/apis/user.py +++ b/apis/user.py @@ -25,9 +25,8 @@ @api.route("/user/profile") class UserDetailsEndpoint(Resource): @api.doc("list_study") - @api.response(200, "Success") + @api.response(200, "Success", study_model) @api.response(400, "Validation Error") - # @api.marshal_with(study_model) def get(self): """Returns user details""" user = User.query.get(g.user.id) @@ -37,14 +36,13 @@ def get(self): return user_information @api.expect(study_model) + @api.marshal_with(study_model) def put(self): """Updates user details""" data = request.json - # verify data follows study_model schema except for profile_image if data is None: return {"message": "No data provided"}, 400 user = User.query.get(g.user.id) - # update email and username in user table and other fields in user_details table user.update(data) user_details = user.user_details user_details.update(data) From e5e1ff77d32d08945066a70bbe2bd3650b85e22a Mon Sep 17 00:00:00 2001 From: slugb0t Date: Tue, 10 Oct 2023 14:18:42 -0700 Subject: [PATCH 257/505] refactor: update spacing for favicon.ico --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 8bc7d139..6888230e 100644 --- a/app.py +++ b/app.py @@ -115,7 +115,7 @@ def on_after_request(resp): "/echo", "/swaggerui", "/swagger.json", - 
"/ favicon.ico", + "/favicon.ico", ] for route in public_routes: if request.path.startswith(route): From 0b15ad478bd940d4607998d02a12c4c96b2113a7 Mon Sep 17 00:00:00 2001 From: slugb0t Date: Tue, 10 Oct 2023 14:21:45 -0700 Subject: [PATCH 258/505] chore: remove print statements --- apis/authentication.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 88534590..ba981b59 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -102,9 +102,7 @@ def authentication(): """it authenticates users to a study, sets access and refresh token. In addition, it handles error handling of expired token and non existed users""" g.user = None - print(request) - print(request.cookies) - print(request.cookies.get("token")) + if "token" not in request.cookies: return token = request.cookies.get("token") From ac52ce684f2b145af79ccb0175f62e1fb419b608 Mon Sep 17 00:00:00 2001 From: slugb0t Date: Tue, 10 Oct 2023 14:46:57 -0700 Subject: [PATCH 259/505] refactor: update username as email if no username --- model/user.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/user.py b/model/user.py index f171a62b..8f30e6bd 100644 --- a/model/user.py +++ b/model/user.py @@ -43,7 +43,7 @@ def from_data(data: dict): def update(self, data): self.email_address = data["email_address"] - self.username = data["username"] + self.username = data["username"] if "username" in data else data["email_address"] # self.email_verified = data["email_verified"] # self.username = data["username"] # self.hash = data["hash"] From 563baf83a7156756656a72c08c17759809db32ce Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 10 Oct 2023 21:47:46 +0000 Subject: [PATCH 260/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/user.py | 4 +++- 1 file changed, 3 insertions(+), 1 
deletion(-) diff --git a/model/user.py b/model/user.py index 8f30e6bd..56ee689f 100644 --- a/model/user.py +++ b/model/user.py @@ -43,7 +43,9 @@ def from_data(data: dict): def update(self, data): self.email_address = data["email_address"] - self.username = data["username"] if "username" in data else data["email_address"] + self.username = ( + data["username"] if "username" in data else data["email_address"] + ) # self.email_verified = data["email_verified"] # self.username = data["username"] # self.hash = data["hash"] From 842257ffb5f109ae39e542788f6a3824559fa6aa Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 10 Oct 2023 16:16:07 -0700 Subject: [PATCH 261/505] fix: alembic dir --- alembic.ini | 114 ++++++++++++++++++++++++++++++++++++++++++++++ alembic/README.md | 5 +- alembic/env.py | 82 +++++++++++++++++++++++++++++++++ app.py | 2 +- model/dataset.py | 2 +- 5 files changed, 199 insertions(+), 6 deletions(-) create mode 100644 alembic.ini create mode 100644 alembic/env.py diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 00000000..f8dbdd42 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,114 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. 
+# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = %(FAIRHUB_DATABASE_URL)s + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README.md b/alembic/README.md index 0bf0a79d..435c5acc 100644 --- a/alembic/README.md +++ b/alembic/README.md @@ -46,7 +46,4 @@ how the `down_revision` identifiers link together with the `down_revision` of `N INFO [alembic.context] Running upgrade None -> 1975ea83b712 ``` -4. All saved changes should be updated or removed by running new revision following steps mentioned above. - - -You can refer to [Alembic official documentation](https://alembic.sqlalchemy.org/en/latest/) page for more details \ No newline at end of file +All saved changes should be updated or removed by running new revision following steps mentioned above. You can refer to [Alembic official documentation](https://alembic.sqlalchemy.org/en/latest/) for more details. 
\ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 00000000..4ba76da3 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,82 @@ +from logging.config import fileConfig +from os import environ +from sqlalchemy import engine_from_config +from sqlalchemy import pool +from dotenv import load_dotenv + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +load_dotenv() + +config = context.config +section = config.config_ini_section +print(section, environ.get("FAIRHUB_DATABASE_URL")) +config.set_section_option(section, "FAIRHUB_DATABASE_URL", str(environ.get("FAIRHUB_DATABASE_URL"))) +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. 
+ + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/app.py b/app.py index 8bc7d139..6888230e 100644 --- a/app.py +++ b/app.py @@ -115,7 +115,7 @@ def on_after_request(resp): "/echo", "/swaggerui", "/swagger.json", - "/ favicon.ico", + "/favicon.ico", ] for route in public_routes: if request.path.startswith(route): diff --git a/model/dataset.py b/model/dataset.py index 37f0f844..91accda4 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -107,7 +107,7 @@ def last_modified(self): return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() @staticmethod - def from_data(study: model.Study, data: dict): + def from_data(study, data: dict): dataset_obj = Dataset(study) dataset_obj.update(data) return dataset_obj From 3095bdc44a2a0414518c431a7872ee3f58eda05b Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 10 Oct 2023 23:18:10 +0000 Subject: [PATCH 262/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- alembic/env.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/alembic/env.py b/alembic/env.py index 4ba76da3..46675a44 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -13,7 +13,9 @@ config = context.config section = config.config_ini_section print(section, environ.get("FAIRHUB_DATABASE_URL")) -config.set_section_option(section, "FAIRHUB_DATABASE_URL", str(environ.get("FAIRHUB_DATABASE_URL"))) 
+config.set_section_option( + section, "FAIRHUB_DATABASE_URL", str(environ.get("FAIRHUB_DATABASE_URL")) +) # Interpret the config file for Python logging. # This line sets up loggers basically. if config.config_file_name is not None: @@ -68,9 +70,7 @@ def run_migrations_online() -> None: poolclass=pool.NullPool, ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() From 1f00d453d0fe9083e0cdaa414509a3b8be1d9c50 Mon Sep 17 00:00:00 2001 From: slugb0t Date: Tue, 10 Oct 2023 16:19:09 -0700 Subject: [PATCH 263/505] refactor: prevent email and username from being changed, comments added --- apis/user.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/apis/user.py b/apis/user.py index b2d4e57a..9d4ae54b 100644 --- a/apis/user.py +++ b/apis/user.py @@ -24,7 +24,7 @@ @api.route("/user/profile") class UserDetailsEndpoint(Resource): - @api.doc("list_study") + @api.doc(description="Returns user details gathered from the user and user_details tables") @api.response(200, "Success", study_model) @api.response(400, "Validation Error") def get(self): @@ -32,6 +32,7 @@ def get(self): user = User.query.get(g.user.id) user_details = user.user_details user_information = user.to_dict() + # combine user and user_details to return a single object user_information.update(user_details.to_dict()) return user_information @@ -43,8 +44,12 @@ def put(self): if data is None: return {"message": "No data provided"}, 400 user = User.query.get(g.user.id) - user.update(data) + # user.update(data) # don't update the username and email_address for now user_details = user.user_details user_details.update(data) db.session.commit() - return user_details.to_dict() + + # combine user and user_details to return a single object + user_information = user.to_dict() + 
user_information.update(user_details.to_dict()) + return user_information From 57a91fdf646c01301992b08617d3ea2207e57fdd Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 10 Oct 2023 23:19:52 +0000 Subject: [PATCH 264/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/user.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/apis/user.py b/apis/user.py index 9d4ae54b..adb741b0 100644 --- a/apis/user.py +++ b/apis/user.py @@ -24,7 +24,9 @@ @api.route("/user/profile") class UserDetailsEndpoint(Resource): - @api.doc(description="Returns user details gathered from the user and user_details tables") + @api.doc( + description="Returns user details gathered from the user and user_details tables" + ) @api.response(200, "Success", study_model) @api.response(400, "Validation Error") def get(self): @@ -48,7 +50,7 @@ def put(self): user_details = user.user_details user_details.update(data) db.session.commit() - + # combine user and user_details to return a single object user_information = user.to_dict() user_information.update(user_details.to_dict()) From 1b226f9dd0fb9eeda135184769e6871972dfc7a4 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 10 Oct 2023 16:57:46 -0700 Subject: [PATCH 265/505] feat: add revision --- ...ae7169083_add_column_in_token_blacklist.py | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 alembic/versions/6ebae7169083_add_column_in_token_blacklist.py diff --git a/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py b/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py new file mode 100644 index 00000000..142c85f5 --- /dev/null +++ b/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py @@ -0,0 +1,26 @@ +"""add column in token_blacklist + +Revision ID: 6ebae7169083 +Revises: 6d4271d33834 +Create Date: 2023-10-09 15:48:38.553510 + 
+""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = '6ebae7169083' +down_revision: Union[str, None] = '6d4271d33834' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + pass + + +def downgrade() -> None: + pass From 3cf1f4f2e05ebdc6384a30c7df37fef7351f8419 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 10 Oct 2023 23:59:03 +0000 Subject: [PATCH 266/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../versions/6ebae7169083_add_column_in_token_blacklist.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py b/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py index 142c85f5..8f075846 100644 --- a/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py +++ b/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py @@ -12,8 +12,8 @@ # revision identifiers, used by Alembic. 
-revision: str = '6ebae7169083' -down_revision: Union[str, None] = '6d4271d33834' +revision: str = "6ebae7169083" +down_revision: Union[str, None] = "6d4271d33834" branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None From 5be9a93b615747863c0dd8027040ffeba48bbf8e Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 10 Oct 2023 18:01:10 -0700 Subject: [PATCH 267/505] style: formatting --- apis/__init__.py | 56 +++++++++---------- apis/authentication.py | 22 +++++--- apis/contributor.py | 6 +- apis/dataset.py | 3 +- apis/dataset_metadata/dataset_access.py | 6 +- .../dataset_alternate_identifier.py | 4 +- apis/dataset_metadata/dataset_consent.py | 7 +-- apis/dataset_metadata/dataset_date.py | 7 +-- .../dataset_de_ident_level.py | 5 +- apis/dataset_metadata/dataset_description.py | 6 +- apis/dataset_metadata/dataset_funder.py | 5 +- .../dataset_managing_organization.py | 4 +- apis/dataset_metadata/dataset_other.py | 5 +- apis/dataset_metadata/dataset_readme.py | 5 +- apis/dataset_metadata/dataset_record_keys.py | 6 +- apis/dataset_metadata/dataset_related_item.py | 6 +- .../dataset_related_item_contributor.py | 6 +- .../dataset_related_item_identifier.py | 7 +-- .../dataset_related_item_other.py | 6 +- .../dataset_related_item_title.py | 6 +- apis/dataset_metadata/dataset_rights.py | 7 +-- apis/dataset_metadata/dataset_subject.py | 7 +-- apis/dataset_metadata/dataset_title.py | 8 +-- apis/dataset_metadata_namespace.py | 1 - apis/participant.py | 5 +- apis/study.py | 5 +- apis/study_metadata/study_arm.py | 8 +-- apis/study_metadata/study_available_ipd.py | 7 ++- apis/study_metadata/study_contact.py | 7 ++- apis/study_metadata/study_description.py | 6 +- apis/study_metadata/study_design.py | 8 +-- apis/study_metadata/study_eligibility.py | 7 +-- apis/study_metadata/study_identification.py | 7 +-- apis/study_metadata/study_intervention.py | 7 +-- apis/study_metadata/study_ipdsharing.py | 7 +-- 
apis/study_metadata/study_link.py | 6 +- apis/study_metadata/study_location.py | 7 +-- apis/study_metadata/study_other.py | 6 +- apis/study_metadata/study_overall_official.py | 7 +-- apis/study_metadata/study_reference.py | 7 +-- .../study_sponsors_collaborators.py | 7 +-- apis/study_metadata/study_status.py | 7 +-- apis/study_metadata_namespace.py | 1 - apis/user.py | 2 +- app.py | 15 ++--- model/__init__.py | 53 ++++++++---------- model/dataset.py | 3 +- model/dataset_contributor.py | 1 + model/dataset_metadata/dataset_access.py | 1 + .../dataset_alternate_identifier.py | 1 + model/dataset_metadata/dataset_consent.py | 1 + .../dataset_contributor_affiliation.py | 1 + model/dataset_metadata/dataset_date.py | 1 + .../dataset_de_ident_level.py | 1 + model/dataset_metadata/dataset_description.py | 1 + model/dataset_metadata/dataset_funder.py | 1 + .../dataset_managing_organization.py | 1 + model/dataset_metadata/dataset_other.py | 4 +- model/dataset_metadata/dataset_readme.py | 1 + model/dataset_metadata/dataset_record_keys.py | 1 + .../dataset_metadata/dataset_related_item.py | 1 + .../dataset_related_item_contributor.py | 1 + .../dataset_related_item_identifier.py | 1 + .../dataset_related_item_other.py | 1 + .../dataset_related_item_title.py | 1 + model/dataset_metadata/dataset_rights.py | 1 + model/dataset_metadata/dataset_subject.py | 1 + model/dataset_metadata/dataset_title.py | 1 + model/email_verification.py | 3 +- model/invited_study_contributor.py | 6 +- model/participant.py | 6 +- model/study.py | 12 ++-- model/study_contributor.py | 4 +- model/study_metadata/study_arm.py | 5 +- model/study_metadata/study_available_ipd.py | 4 +- model/study_metadata/study_contact.py | 5 +- model/study_metadata/study_description.py | 1 + model/study_metadata/study_design.py | 1 + model/study_metadata/study_eligibility.py | 4 +- model/study_metadata/study_identification.py | 5 +- model/study_metadata/study_intervention.py | 8 ++- model/study_metadata/study_ipdsharing.py | 4 
+- model/study_metadata/study_link.py | 5 +- model/study_metadata/study_location.py | 5 +- model/study_metadata/study_other.py | 1 + .../study_metadata/study_overall_official.py | 5 +- model/study_metadata/study_reference.py | 4 +- .../study_sponsors_collaborators.py | 4 +- model/user.py | 9 +-- model/user_details.py | 1 + model/version.py | 4 +- 91 files changed, 271 insertions(+), 255 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index bcf98176..ba9e7847 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -4,15 +4,34 @@ from apis.dataset_metadata_namespace import api as dataset_metadata_namespace from apis.study_metadata_namespace import api as study_metadata_namespace +from .authentication import api as authentication from .contributor import api as contributors_api - from .dataset import api as dataset_api +from .dataset_metadata.dataset_access import api as access +from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier +from .dataset_metadata.dataset_consent import api as consent +from .dataset_metadata.dataset_date import api as date +from .dataset_metadata.dataset_de_ident_level import api as de_ident_level +from .dataset_metadata.dataset_description import api as description +from .dataset_metadata.dataset_funder import api as funder +from .dataset_metadata.dataset_managing_organization import api as managing_organization +from .dataset_metadata.dataset_other import api as dataset_other +from .dataset_metadata.dataset_readme import api as readme +from .dataset_metadata.dataset_record_keys import api as record_keys +from .dataset_metadata.dataset_related_item import api as related_item +from .dataset_metadata.dataset_related_item_contributor import ( + api as related_item_contributor, +) +from .dataset_metadata.dataset_related_item_identifier import ( + api as related_item_identifier, +) +from .dataset_metadata.dataset_related_item_other import api as related_item_other +from 
.dataset_metadata.dataset_related_item_title import api as related_item_title +from .dataset_metadata.dataset_rights import api as rights +from .dataset_metadata.dataset_subject import api as subject +from .dataset_metadata.dataset_title import api as title from .participant import api as participants_api from .study import api as study_api -from .user import api as user - -from .authentication import api as authentication - from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd from .study_metadata.study_contact import api as contact @@ -29,32 +48,7 @@ from .study_metadata.study_reference import api as reference from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator from .study_metadata.study_status import api as status - - -from .dataset_metadata.dataset_access import api as access -from .dataset_metadata.dataset_consent import api as consent -from .dataset_metadata.dataset_subject import api as subject -from .dataset_metadata.dataset_description import api as description -from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier -from .dataset_metadata.dataset_other import api as dataset_other -from .dataset_metadata.dataset_date import api as date -from .dataset_metadata.dataset_de_ident_level import api as de_ident_level -from .dataset_metadata.dataset_managing_organization import api as managing_organization -from .dataset_metadata.dataset_readme import api as readme -from .dataset_metadata.dataset_record_keys import api as record_keys -from .dataset_metadata.dataset_rights import api as rights -from .dataset_metadata.dataset_title import api as title -from .dataset_metadata.dataset_related_item import api as related_item -from .dataset_metadata.dataset_related_item_title import api as related_item_title -from .dataset_metadata.dataset_related_item_contributor import ( - api as related_item_contributor, -) -from 
.dataset_metadata.dataset_related_item_identifier import ( - api as related_item_identifier, -) -from .dataset_metadata.dataset_related_item_other import api as related_item_other -from .dataset_metadata.dataset_funder import api as funder - +from .user import api as user api = Api( title="FAIRHUB", diff --git a/apis/authentication.py b/apis/authentication.py index ba981b59..d68ce102 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -1,13 +1,21 @@ -from flask import request, make_response, g -from flask_restx import Namespace, Resource, fields -from model import StudyContributor -from datetime import timezone import datetime -from model import db, User, TokenBlacklist, Study, StudyInvitedContributor +import re +import uuid +from datetime import timezone + import jwt +from flask import g, make_response, request +from flask_restx import Namespace, Resource, fields + import config -import uuid -import re +from model import ( + Study, + StudyContributor, + StudyInvitedContributor, + TokenBlacklist, + User, + db, +) api = Namespace("Authentication", description="Authentication paths", path="/") diff --git a/apis/contributor.py b/apis/contributor.py index 6d479a49..0b96f389 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,11 +1,11 @@ from collections import OrderedDict -import model +from flask import g, request from flask_restx import Namespace, Resource, fields -from flask import request, g + import model -from .authentication import is_granted +from .authentication import is_granted api = Namespace("Contributor", description="Contributors", path="/") diff --git a/apis/dataset.py b/apis/dataset.py index 8abebccb..7f3bf2c5 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,7 +1,8 @@ from flask import Response, jsonify, request from flask_restx import Namespace, Resource, fields -from model import Dataset, Version, Participant, Study, db +from model import Dataset, Participant, Study, Version, db + from .authentication import 
is_granted api = Namespace("Dataset", description="Dataset operations", path="/") diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index 4e3350cd..bc9cef76 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -1,10 +1,8 @@ -from model import Dataset, DatasetAccess, db - -from flask_restx import Resource, fields from flask import request +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api - +from model import Dataset, DatasetAccess, db dataset_access = api.model( "DatasetAccess", diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 89194b6e..0cf35726 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,8 +1,8 @@ -from model import Dataset, db, DatasetAlternateIdentifier -from flask_restx import Resource, fields from flask import request +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetAlternateIdentifier, db dataset_identifier = api.model( "DatasetAlternateIdentifier", diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 5acba9c3..c670dce0 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,9 +1,8 @@ -from model import Dataset, DatasetConsent, db - -from flask_restx import Resource, fields from flask import request -from apis.dataset_metadata_namespace import api +from flask_restx import Resource, fields +from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetConsent, db dataset_consent = api.model( "DatasetConsent", diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index d05015c1..459c14ee 100644 --- 
a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,9 +1,8 @@ -from model import Dataset, db, DatasetDate - -from flask_restx import Resource, fields from flask import request -from apis.dataset_metadata_namespace import api +from flask_restx import Resource, fields +from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetDate, db dataset_date = api.model( "DatasetDate", diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 648a625c..5fcb0898 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -1,9 +1,8 @@ -from model import Dataset, DatasetDeIdentLevel, db - -from flask_restx import Resource, fields from flask import request +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import Dataset, db de_ident_level = api.model( "DatasetDeIdentLevel", diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index bc9059a3..1270d550 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -1,10 +1,8 @@ -from model import Dataset, db, DatasetDescription - -from flask_restx import Resource, fields from flask import request - +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetDescription, db dataset_description = api.model( "DatasetDescription", diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 7b1d3e66..cba5437e 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,9 +1,8 @@ -from model import Dataset, DatasetFunder, db - -from flask_restx import Resource, fields from flask import request +from flask_restx import Resource, fields from 
apis.dataset_metadata_namespace import api +from model import Dataset, DatasetFunder, db dataset_funder = api.model( "DatasetFunder", diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 9c2a4226..032e65d9 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -1,10 +1,8 @@ -from model import Dataset, db, DatasetManagingOrganization from flask import request - from flask_restx import Resource, fields - from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetManagingOrganization, db managing_organization = api.model( "DatasetManagingOrganization", diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 7d4748d7..a198ac47 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,9 +1,8 @@ -from model import Dataset, db, DatasetOther from flask import request - -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import Dataset, db dataset_other = api.model( "DatasetOther", diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index 391f93cc..043cd764 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -1,9 +1,8 @@ -from model import Dataset, db, DatasetReadme from flask import request - -from flask_restx import Namespace, Resource, fields +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import Dataset, db dataset_readme = api.model( "DatasetReadme", diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 7ce9ec2c..1467ad53 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ 
b/apis/dataset_metadata/dataset_record_keys.py @@ -1,8 +1,8 @@ -from model import Dataset, DatasetRecordKeys, db - -from flask_restx import Resource, fields from flask import request +from flask_restx import Resource, fields + from apis.dataset_metadata_namespace import api +from model import Dataset, db dataset_record_keys = api.model( "DatasetRecordKeys", diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index b2bb5450..4b863fde 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,8 +1,8 @@ -from model import Dataset, DatasetRelatedItem, db +from flask import request +from flask_restx import Resource, fields -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetRelatedItem, db dataset_related_item = api.model( "DatasetRelatedItem", diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py index 3a0d9d0e..96def854 100644 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -1,9 +1,7 @@ -from model import Dataset, DatasetRelatedItemContributor, db - -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request +from flask_restx import Resource from apis.dataset_metadata_namespace import api +from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py index 041d308c..17e54db3 100644 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -1,10 +1,7 @@ -from model import Dataset, DatasetRelatedItemIdentifier, db 
- -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request - +from flask_restx import Resource from apis.dataset_metadata_namespace import api +from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py index d2328594..be39a9e7 100644 --- a/apis/dataset_metadata/dataset_related_item_other.py +++ b/apis/dataset_metadata/dataset_related_item_other.py @@ -1,9 +1,7 @@ -from model import Dataset, DatasetRelatedItemOther, db - -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request +from flask_restx import Resource from apis.dataset_metadata_namespace import api +from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", diff --git a/apis/dataset_metadata/dataset_related_item_title.py b/apis/dataset_metadata/dataset_related_item_title.py index 1ceaab46..f6c00841 100644 --- a/apis/dataset_metadata/dataset_related_item_title.py +++ b/apis/dataset_metadata/dataset_related_item_title.py @@ -1,9 +1,7 @@ -from model import Dataset, DatasetRelatedItemTitle, db - -from flask_restx import Namespace, Resource, fields - +from flask_restx import Resource from apis.dataset_metadata_namespace import api +from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemTitle", diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 9ff7dd4b..1a00e76b 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -1,9 +1,8 @@ -from model import Dataset, DatasetRights, db - -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request +from flask import request +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import 
Dataset, DatasetRights, db dataset_rights = api.model( "DatasetRights", diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 4c763b7a..a1b4b680 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -1,9 +1,8 @@ -from model import Dataset, DatasetSubject, db - -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request +from flask import request +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetSubject, db dataset_subject = api.model( "DatasetSubject", diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 3e8e8291..dcd885d1 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,10 +1,8 @@ -from model import Dataset, DatasetTitle, db - -from flask_restx import Namespace, Resource, fields -from flask import jsonify, request - +from flask import request +from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api +from model import Dataset, DatasetTitle, db dataset_title = api.model( "DatasetTitle", diff --git a/apis/dataset_metadata_namespace.py b/apis/dataset_metadata_namespace.py index 05837562..57c5820d 100644 --- a/apis/dataset_metadata_namespace.py +++ b/apis/dataset_metadata_namespace.py @@ -1,4 +1,3 @@ from flask_restx import Namespace - api = Namespace("Dataset Metadata", description="Dataset operations", path="/") diff --git a/apis/participant.py b/apis/participant.py index 8b42de2a..8ab874df 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -1,7 +1,8 @@ -from flask import Response, jsonify, request, g +from flask import Response, g, jsonify, request from flask_restx import Namespace, Resource, fields -from model import Participant, Study, db, StudyContributor +from model import Participant, Study, StudyContributor, db + from 
.authentication import is_granted api = Namespace("Participant", description="Participant operations", path="/") diff --git a/apis/study.py b/apis/study.py index 932cb786..6f0daa44 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,7 +1,8 @@ -from flask import request, g +from flask import g, request from flask_restx import Namespace, Resource, fields -from model import Study, db, User, StudyContributor +from model import Study, StudyContributor, User, db + from .authentication import is_granted api = Namespace("Study", description="Study operations", path="/") diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 4173214b..3e33cacc 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,11 +1,11 @@ """API routes for study arm metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyArm, Arm -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Arm, Study, StudyArm, db + +from ..authentication import is_granted arm_object = api.model( "ArmObject", diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index c35f8273..f0768430 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,10 +1,11 @@ """API routes for study available ipd metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyAvailableIpd +from flask_restx import Resource, fields + from apis.study_metadata_namespace import api -from ..authentication import is_granted +from model import Study, StudyAvailableIpd, db +from ..authentication import is_granted study_available = api.model( "StudyAvailable", diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index e7f6dcbd..20cf6411 100644 --- 
a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -1,10 +1,11 @@ """API routes for study contact metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyContact +from flask_restx import Resource, fields + from apis.study_metadata_namespace import api -from ..authentication import is_granted, is_study_metadata +from model import Study, StudyContact, db +from ..authentication import is_granted study_contact = api.model( "StudyContact", diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 086e8620..9e9cea8e 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -1,11 +1,11 @@ """API routes for study description metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db -from ..authentication import is_granted, is_study_metadata +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, db +from ..authentication import is_granted study_description = api.model( "StudyDescription", diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 0821708a..04098c82 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -1,11 +1,11 @@ """API routes for study design metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, db + +from ..authentication import is_granted study_design = api.model( "StudyDesign", diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 0a0d5955..e1806f32 100644 --- a/apis/study_metadata/study_eligibility.py +++ 
b/apis/study_metadata/study_eligibility.py @@ -1,12 +1,11 @@ """API routes for study eligibility metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, db +from ..authentication import is_granted study_eligibility = api.model( "StudyEligibility", diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 718564d0..0fc492a1 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,12 +1,11 @@ """API routes for study identification metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyIdentification, Identifiers -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Identifiers, Study, StudyIdentification, db +from ..authentication import is_granted study_identification = api.model( "StudyIdentification", diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 4de6d19e..359a185d 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -1,12 +1,11 @@ """API routes for study intervention metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyIntervention -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, StudyIntervention, db +from ..authentication import is_granted study_intervention = api.model( "StudyIntervention", diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index d4cc2701..08811716 100644 --- 
a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -1,12 +1,11 @@ """API routes for study ipdsharing metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, db +from ..authentication import is_granted study_ipdsharing = api.model( "StudyIpdsharing", diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index ad4b9397..680e57aa 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -1,11 +1,11 @@ """API routes for study link metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyLink -from ..authentication import is_granted +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, StudyLink, db +from ..authentication import is_granted study_link = api.model( "StudyLink", diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index d9977234..ff6687e0 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -1,12 +1,11 @@ """API routes for study location metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyLocation -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, StudyLocation, db +from ..authentication import is_granted study_location = api.model( "StudyLocation", diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 5c1b3011..1d8f39bc 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -1,11 +1,11 @@ """API routes for study 
other metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db -from ..authentication import is_granted +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, db +from ..authentication import is_granted study_other = api.model( "StudyOther", diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 9c1c6a16..db42bbe3 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,12 +1,11 @@ """API routes for study overall official metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyOverallOfficial - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api -from ..authentication import is_granted +from model import Study, StudyOverallOfficial, db +from ..authentication import is_granted study_overall_official = api.model( "StudyOverallOfficial", diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 93f27447..2d45c44d 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,12 +1,11 @@ """API routes for study reference metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db, StudyReference -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, StudyReference, db +from ..authentication import is_granted study_reference = api.model( "StudyReference", diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 89566ffe..5a1b82a1 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ 
-1,12 +1,11 @@ """API routes for study sponsors and collaborators metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db -from ..authentication import is_granted - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, db +from ..authentication import is_granted study_sponsors = api.model( "StudySponsors", diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 3fe2b9e5..eec16fa8 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -1,12 +1,11 @@ """API routes for study status metadata""" -from flask_restx import Resource, fields from flask import request -from model import Study, db -from ..authentication import is_granted, is_study_metadata - +from flask_restx import Resource, fields from apis.study_metadata_namespace import api +from model import Study, db +from ..authentication import is_granted, is_study_metadata study_status = api.model( "StudyStatus", diff --git a/apis/study_metadata_namespace.py b/apis/study_metadata_namespace.py index 615f074f..11542e54 100644 --- a/apis/study_metadata_namespace.py +++ b/apis/study_metadata_namespace.py @@ -1,4 +1,3 @@ from flask_restx import Namespace - api = Namespace("Study Metadata", description="dataset operations", path="/") diff --git a/apis/user.py b/apis/user.py index adb741b0..2fd8de1d 100644 --- a/apis/user.py +++ b/apis/user.py @@ -1,4 +1,4 @@ -from flask import request, g +from flask import g, request from flask_restx import Namespace, Resource, fields from model import User, db diff --git a/app.py b/app.py index 6888230e..012d801d 100644 --- a/app.py +++ b/app.py @@ -1,17 +1,18 @@ """Entry point for the application.""" -from apis.exception import ValidationException -from flask import Flask, request, make_response, g +import datetime +from datetime import timezone + import jwt -import config +from flask import Flask, g, 
make_response, request +from flask_bcrypt import Bcrypt from flask_cors import CORS from sqlalchemy import MetaData -from datetime import timezone -import datetime +import config import model from apis import api -from flask_bcrypt import Bcrypt -from apis.authentication import authentication, authorization, UnauthenticatedException +from apis.authentication import UnauthenticatedException, authentication, authorization +from apis.exception import ValidationException # from pyfairdatatools import __version__ diff --git a/model/__init__.py b/model/__init__.py index 772b980f..83ec4d68 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,20 +1,17 @@ -from .version import Version -from .dataset_versions import DatasetVersions +from model.dataset_metadata.dataset_related_item import DatasetRelatedItem +from model.dataset_metadata.dataset_related_item_contributor import ( + DatasetRelatedItemContributor, +) +from model.dataset_metadata.dataset_related_item_identifier import ( + DatasetRelatedItemIdentifier, +) +from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther +from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle -from .db import db -from .participant import Participant -from .study import Study, StudyException -from .user import User from .dataset import Dataset - -from .email_verification import EmailVerification -from .token_blacklist import TokenBlacklist -from .user_details import UserDetails from .dataset_contributor import DatasetContributor -from .invited_study_contributor import StudyInvitedContributor -from .study_contributor import StudyContributor - from .dataset_metadata.dataset_access import DatasetAccess +from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_consent import DatasetConsent from .dataset_metadata.dataset_contributor_affiliation import ( DatasetContributorAffiliation, @@ -23,25 +20,22 @@ from 
.dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder -from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_managing_organization import DatasetManagingOrganization from .dataset_metadata.dataset_other import DatasetOther from .dataset_metadata.dataset_readme import DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights -from .dataset_metadata.dataset_title import DatasetTitle from .dataset_metadata.dataset_subject import DatasetSubject - -from model.dataset_metadata.dataset_related_item_contributor import ( - DatasetRelatedItemContributor, -) -from model.dataset_metadata.dataset_related_item_identifier import ( - DatasetRelatedItemIdentifier, -) -from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther -from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle -from model.dataset_metadata.dataset_related_item import DatasetRelatedItem - +from .dataset_metadata.dataset_title import DatasetTitle +from .dataset_versions import DatasetVersions +from .db import db +from .email_verification import EmailVerification +from .invited_study_contributor import StudyInvitedContributor +from .participant import Participant +from .study import Study, StudyException +from .study_contributor import StudyContributor +from .study_metadata.arm import Arm +from .study_metadata.identifiers import Identifiers from .study_metadata.study_arm import StudyArm from .study_metadata.study_available_ipd import StudyAvailableIpd from .study_metadata.study_contact import StudyContact @@ -58,9 +52,10 @@ from .study_metadata.study_reference import StudyReference from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from 
.study_metadata.study_status import StudyStatus -from .study_metadata.identifiers import Identifiers -from .study_metadata.arm import Arm - +from .token_blacklist import TokenBlacklist +from .user import User +from .user_details import UserDetails +from .version import Version __all__ = [ "Study", diff --git a/model/dataset.py b/model/dataset.py index 91accda4..9adb3d29 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -1,7 +1,8 @@ +import datetime import uuid from datetime import timezone + from sqlalchemy.sql.expression import true -import datetime import model diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py index 2f92b2e4..844d8537 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -1,4 +1,5 @@ import uuid + from .db import db diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 0a183179..366aeaf7 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index 9a51f15c..dca8470d 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 38f7fd16..2bf44fd9 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index be0e02c8..f76f1ac3 100644 --- a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -1,4 +1,5 @@ import uuid + 
from ..db import db diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index 21054f3e..71bbb1d0 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index 566a212b..e639aff1 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index ce3a056d..1b4f654d 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index f1375971..fe71ec16 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py index 4e8b786b..5fe38e99 100644 --- a/model/dataset_metadata/dataset_managing_organization.py +++ b/model/dataset_metadata/dataset_managing_organization.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index f8409b2a..a0607b2f 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -1,8 +1,10 @@ import uuid -from ..db import db + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY +from ..db import db + class DatasetOther(db.Model): def __init__(self, dataset): diff --git a/model/dataset_metadata/dataset_readme.py 
b/model/dataset_metadata/dataset_readme.py index b3c0594c..321540b0 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 4d6c48b0..10063033 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index 4007888a..1975a761 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index 33bedbcf..5340d5f0 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 390fd4fa..f428f7ba 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py index b7910694..bc7071ab 100644 --- a/model/dataset_metadata/dataset_related_item_other.py +++ b/model/dataset_metadata/dataset_related_item_other.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 79e23a64..642cd771 100644 --- 
a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index b8cb70b2..ec46da69 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index b62342be..39b0de75 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index 739b5253..ca413f46 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/email_verification.py b/model/email_verification.py index 63d481fd..81cb3ca8 100644 --- a/model/email_verification.py +++ b/model/email_verification.py @@ -1,5 +1,6 @@ -from datetime import timezone import datetime +from datetime import timezone + from .db import db diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index aa727f1d..219f1f76 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,8 +1,8 @@ +import datetime import uuid -from datetime import datetime +from datetime import datetime, timezone + from .db import db -import datetime -from datetime import timezone class StudyInvitedContributor(db.Model): diff --git a/model/participant.py b/model/participant.py index e500b54c..f1217dea 100644 --- a/model/participant.py +++ b/model/participant.py @@ -1,8 +1,10 @@ +import datetime import uuid -import model from datetime import timezone + +import model + from .db import db -import datetime class 
Participant(db.Model): diff --git a/model/study.py b/model/study.py index cc28a03a..a7885610 100644 --- a/model/study.py +++ b/model/study.py @@ -1,12 +1,14 @@ -import uuid -from datetime import datetime -from datetime import timezone -import model -from .db import db import datetime +import uuid +from datetime import datetime, timezone + from flask import g + +import model from apis import exception +from .db import db + class StudyException(Exception): pass diff --git a/model/study_contributor.py b/model/study_contributor.py index 055a6bfc..cf7236d6 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,6 +1,6 @@ -from datetime import datetime -from datetime import timezone import datetime +from datetime import datetime, timezone + from .db import db diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index f2c9d1e8..488e09cb 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -1,10 +1,11 @@ +import datetime import uuid +from datetime import timezone + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY from ..db import db -from datetime import timezone -import datetime class StudyArm(db.Model): diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index e4f4d58d..2ba837dd 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -1,8 +1,8 @@ +import datetime import uuid +from datetime import timezone from ..db import db -from datetime import timezone -import datetime class StudyAvailableIpd(db.Model): diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index e7ea1126..1ec50c44 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -1,7 +1,8 @@ +import datetime import uuid -from ..db import db from datetime import timezone -import datetime + +from ..db import db 
class StudyContact(db.Model): diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index f0cb9979..b5df990d 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -1,4 +1,5 @@ import uuid + from ..db import db diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 041e23ae..944d6662 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -1,4 +1,5 @@ import uuid + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index af7ee5c0..65ccad3b 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -1,8 +1,10 @@ import uuid -from ..db import db + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY +from ..db import db + class StudyEligibility(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index fed33c4b..7341c8cc 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -1,7 +1,8 @@ +import datetime import uuid -from ..db import db from datetime import timezone -import datetime + +from ..db import db class StudyIdentification(db.Model): diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index e6f801b7..9e0e37eb 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -1,9 +1,11 @@ +import datetime import uuid -from ..db import db +from datetime import timezone + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY -from datetime import timezone -import datetime + +from ..db import db class 
StudyIntervention(db.Model): diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 7b788420..d091781c 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -1,8 +1,10 @@ import uuid -from ..db import db + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY +from ..db import db + class StudyIpdsharing(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index c62be37d..53c23a06 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -1,7 +1,8 @@ +import datetime import uuid -from ..db import db from datetime import timezone -import datetime + +from ..db import db class StudyLink(db.Model): diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 5a3533c2..d5ba2d86 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -1,7 +1,8 @@ +import datetime import uuid -from ..db import db from datetime import timezone -import datetime + +from ..db import db class StudyLocation(db.Model): diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 95f362e7..bb1dced0 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -1,4 +1,5 @@ import uuid + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 4cc78a26..a4d90ebe 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -1,7 +1,8 @@ +import datetime import uuid -from ..db import db from datetime import timezone -import datetime + +from ..db import db class StudyOverallOfficial(db.Model): diff --git 
a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 693276c4..1b6ecfb1 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -1,8 +1,8 @@ +import datetime import uuid +from datetime import timezone from ..db import db -from datetime import timezone -import datetime class StudyReference(db.Model): diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index e362fd10..08292fa9 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -1,8 +1,10 @@ import uuid -from ..db import db + from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY +from ..db import db + class StudySponsorsCollaborators(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/user.py b/model/user.py index 56ee689f..2b3a1efe 100644 --- a/model/user.py +++ b/model/user.py @@ -1,11 +1,12 @@ -import uuid -from datetime import datetime -from .db import db -from datetime import timezone import datetime +import uuid +from datetime import datetime, timezone + import app import model +from .db import db + class User(db.Model): def __init__(self, password, data): diff --git a/model/user_details.py b/model/user_details.py index e3ba685c..d2ffebdb 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -1,4 +1,5 @@ import uuid + from .db import db diff --git a/model/version.py b/model/version.py index ed7eb3ca..5daf5227 100644 --- a/model/version.py +++ b/model/version.py @@ -1,7 +1,9 @@ +import datetime import uuid from datetime import timezone -import datetime + from model.dataset import Dataset + from .db import db version_participants = db.Table( From dcc234257d8de6be2dffc85a4ebd0962805d472a Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 10 Oct 2023 18:20:04 -0700 Subject: [PATCH 268/505] fix: unused imports --- 
apis/contributor.py | 4 ++-- apis/dataset.py | 2 +- apis/dataset_metadata/dataset_access.py | 3 +-- apis/dataset_metadata/dataset_alternate_identifier.py | 2 +- apis/dataset_metadata/dataset_consent.py | 2 +- apis/dataset_metadata/dataset_date.py | 2 +- apis/dataset_metadata/dataset_managing_organization.py | 6 ++---- apis/dataset_metadata/dataset_other.py | 2 +- apis/dataset_metadata/dataset_related_item.py | 2 +- 9 files changed, 11 insertions(+), 14 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 0b96f389..0474f340 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -102,7 +102,7 @@ def put(self, study_id: int, user_id: int): # TODO: Owners downgrading themselves if user != g.user: grantee_level = list(grants.keys()).index(grantee.permission) # 1 - new_level = list(grants.keys()).index(permission) # 2 + new_level = list(grants.keys()).index(permission) # 2 granter_level = list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level and new_level <= grantee_level: return ( @@ -110,7 +110,7 @@ def put(self, study_id: int, user_id: int): 403, ) grantee.permission = permission - db.session.commit() + model.db.session.commit() return grantee.to_dict(), 200 @api.doc("contributor delete") diff --git a/apis/dataset.py b/apis/dataset.py index 7f3bf2c5..2bbe7059 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -107,7 +107,7 @@ def delete(self, study_id, dataset_id): @api.route("/study//dataset//version/") -class Version(Resource): +class VersionResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("dataset version") diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index bc9cef76..2157533d 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -2,7 +2,7 @@ from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api -from model import Dataset, 
DatasetAccess, db +from model import Dataset, db dataset_access = api.model( "DatasetAccess", @@ -28,7 +28,6 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_access_] def put(self, study_id: int, dataset_id: int): - data = request.json dataset_ = Dataset.query.get(dataset_id) dataset_access_ = dataset_.dataset_access.update(request.json) db.session.commit() diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 0cf35726..b7dfae25 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -33,7 +33,7 @@ def post(self, study_id: int, dataset_id: int): for i in data: if "id" in i and i["id"]: dataset_identifier_ = DatasetAlternateIdentifier.query.get(i["id"]) - if dataset_identifier_ == None: + if not dataset_identifier_: return f"Study link {i['id']} Id is not found", 404 dataset_identifier_.update(i) list_of_elements.append(dataset_identifier_.to_dict()) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index c670dce0..b63e63d7 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -2,7 +2,7 @@ from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetConsent, db +from model import Dataset, db dataset_consent = api.model( "DatasetConsent", diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 459c14ee..fc5b9a92 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -2,7 +2,7 @@ from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetDate, db +from model import Dataset, db dataset_date = api.model( "DatasetDate", diff --git 
a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 032e65d9..74d7b9d7 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -2,7 +2,7 @@ from flask_restx import Resource, fields from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetManagingOrganization, db +from model import Dataset, db managing_organization = api.model( "DatasetManagingOrganization", @@ -28,8 +28,6 @@ def get(self, study_id: int, dataset_id: int): def put(self, study_id: int, dataset_id: int): data = request.json dataset_ = Dataset.query.get(dataset_id) - managing_organization_ = dataset_.dataset_managing_organization.update( - request.json - ) + managing_organization_ = dataset_.dataset_managing_organization.update(data) db.session.commit() return managing_organization_.to_dict() diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index a198ac47..8b304ad6 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -32,6 +32,6 @@ def get(self, study_id: int, dataset_id: int): def put(self, study_id: int, dataset_id: int): data = request.json dataset_ = Dataset.query.get(dataset_id) - dataset_other_ = dataset_.dataset_other.update(request.json) + dataset_other_ = dataset_.dataset_other.update(data) db.session.commit() return dataset_other_.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 4b863fde..7a70283a 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -40,6 +40,6 @@ class DatasetRelatedItemUpdate(Resource): def put(self, study_id: int, dataset_id: int, related_item_id: int): data = request.json dataset_related_item_ = DatasetRelatedItem.query.get(related_item_id) - dataset_related_item_.update(request.json) 
+ dataset_related_item_.update(data) db.session.commit() return dataset_related_item_.to_dict() From 907b8e88d94e6d820abaacdf73190a654821a8e9 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 11 Oct 2023 11:27:02 -0700 Subject: [PATCH 269/505] fix: unused imports and assignments --- apis/authentication.py | 3 +-- apis/participant.py | 4 ++-- apis/study_metadata/study_other.py | 2 +- apis/study_metadata/study_status.py | 2 +- app.py | 6 +++--- model/invited_study_contributor.py | 7 ++++--- model/study.py | 10 +++++----- model/study_contributor.py | 5 +++-- model/study_metadata/identifiers.py | 3 +-- model/user.py | 5 +++-- tests/functional/test_study_api.py | 4 ---- 11 files changed, 24 insertions(+), 27 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index d68ce102..0058b004 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -91,8 +91,7 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=200), + "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=200), "jti": str(uuid.uuid4()), }, config.secret, diff --git a/apis/participant.py b/apis/participant.py index 8ab874df..2e526563 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -1,7 +1,7 @@ -from flask import Response, g, jsonify, request +from flask import Response, request from flask_restx import Namespace, Resource, fields -from model import Participant, Study, StudyContributor, db +from model import Participant, Study, db from .authentication import is_granted diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 1d8f39bc..9604d19b 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -80,7 +80,7 @@ def put(self, study_id: int): # todo: rename class @api.route("/study//metadata/conditions") -class StudyOversightResource(Resource): +class StudyConditionsResource(Resource): 
"""Study Conditions Metadata""" @api.doc("conditions") diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index eec16fa8..afebbb53 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -5,7 +5,7 @@ from apis.study_metadata_namespace import api from model import Study, db -from ..authentication import is_granted, is_study_metadata +from ..authentication import is_granted study_status = api.model( "StudyStatus", diff --git a/app.py b/app.py index 012d801d..cf2831ac 100644 --- a/app.py +++ b/app.py @@ -3,7 +3,7 @@ from datetime import timezone import jwt -from flask import Flask, g, make_response, request +from flask import Flask, request from flask_bcrypt import Bcrypt from flask_cors import CORS from sqlalchemy import MetaData @@ -172,7 +172,7 @@ def validation_exception_handler(error): @app.cli.command("destroy-schema") def destroy_schema(): engine = model.db.session.get_bind() - with engine.begin() as conn: + with engine.begin(): """Create the database schema.""" model.db.drop_all() @@ -183,7 +183,7 @@ def destroy_schema(): table_names = [table.name for table in metadata.tables.values()] # print(table_names) if len(table_names) == 0: - with engine.begin() as conn: + with engine.begin(): """Create the database schema.""" model.db.create_all() return app diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 219f1f76..1f13b71f 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,18 +1,19 @@ import datetime import uuid -from datetime import datetime, timezone from .db import db +# from datetime import datetime, timezone + class StudyInvitedContributor(db.Model): def __init__(self, study, email_address, permission): self.id = str(uuid.uuid4()) self.study = study self.permission = permission - self.invited_on = datetime.datetime.now(timezone.utc).timestamp() + self.invited_on = 
datetime.datetime.now(datetime.timezone.utc).timestamp() self.email_address = email_address - self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() self.token = "" __tablename__ = "invited_study_contributor" diff --git a/model/study.py b/model/study.py index a7885610..4ad342c0 100644 --- a/model/study.py +++ b/model/study.py @@ -1,6 +1,5 @@ import datetime import uuid -from datetime import datetime, timezone from flask import g @@ -9,6 +8,8 @@ from .db import db +# from datetime import datetime, timezone + class StudyException(Exception): pass @@ -19,7 +20,7 @@ class Study(db.Model): def __init__(self): self.id = str(uuid.uuid4()) - self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() # self.study_status = model.StudyStatus(self) self.study_sponsors_collaborators = model.StudySponsorsCollaborators(self) @@ -153,7 +154,6 @@ def to_dict(self): owner = self.study_contributors.filter( model.StudyContributor.permission == "owner" ).first() - user = model.User.query.get(g.user.id) contributor_permission = self.study_contributors.filter( model.StudyContributor.user_id == g.user.id ).first() @@ -190,7 +190,7 @@ def update(self, data): self.title = data["title"] self.image = data["image"] - self.updated_on = datetime.datetime.now(timezone.utc).timestamp() + self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() def validate(self): """Validates the study""" @@ -202,7 +202,7 @@ def validate(self): return violations def touch(self): - self.updated_on = datetime.datetime.now(timezone.utc).timestamp() + self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() def add_user_to_study(self, user, permission): """add user to study""" diff --git a/model/study_contributor.py b/model/study_contributor.py index cf7236d6..4d02d4aa 100644 --- a/model/study_contributor.py +++ 
b/model/study_contributor.py @@ -1,15 +1,16 @@ import datetime -from datetime import datetime, timezone from .db import db +# from datetime import datetime, timezone + class StudyContributor(db.Model): def __init__(self, study, user, permission): self.study = study self.user = user self.permission = permission - self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() __tablename__ = "study_contributor" permission = db.Column(db.String, nullable=False) diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index fa5d4cd9..4b66db50 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -24,8 +24,7 @@ def to_dict(self): for identifier in sorted_study_identifications if not identifier.secondary ] - ) - != 0 + ) != 0 else [], "secondary": [ identifier.to_dict() diff --git a/model/user.py b/model/user.py index 2b3a1efe..2f9fe062 100644 --- a/model/user.py +++ b/model/user.py @@ -1,17 +1,18 @@ import datetime import uuid -from datetime import datetime, timezone import app import model from .db import db +# from datetime import datetime, timezone + class User(db.Model): def __init__(self, password, data): self.id = str(uuid.uuid4()) - self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() self.set_password(password, data) self.user_details = model.UserDetails(self) diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index 7374e952..eadc6fa1 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -1,5 +1,4 @@ """Tests for API endpoints related to studies""" -import json import os from app import create_app @@ -19,9 +18,6 @@ def test_should_return_studies(): # Create a test client using the Flask application configured for testing with flask_app.test_client() as 
test_client: response = test_client.get("/study") - # Convert the response data from JSON to a Python dictionary - response_data = json.loads(response.data) - # Check the response is correct assert response.status_code == 200 From 3962aac42971de2773d90e26f381add713ec3d72 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 11 Oct 2023 17:23:14 -0700 Subject: [PATCH 270/505] fix: auth permission for admin --- apis/contributor.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/apis/contributor.py b/apis/contributor.py index 0474f340..1b26b1b7 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -135,7 +135,14 @@ def delete(self, study_id: int, user_id: str): invited_grantee = model.StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() - can_delete = invited_grantee.permission in grants[granter.permission] + invited_grants = OrderedDict() + invited_grants["viewer"] = [] + invited_grants["editor"] = [] + invited_grants["admin"] = ["viewer", "editor", "admin"] + invited_grants["owner"] = ["editor", "viewer", "admin"] + can_delete = ( + invited_grantee.permission in invited_grants[granter.permission] + ) if not can_delete: return f"User cannot delete {invited_grantee.permission}", 403 model.db.session.delete(invited_grantee) From b6f8325a26331804bd774655ebf272ed90463eb8 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 11 Oct 2023 17:23:37 -0700 Subject: [PATCH 271/505] fix: fixed flake8 errors --- apis/__init__.py | 49 +++++++++++++- apis/authentication.py | 38 +++++------ apis/dataset.py | 66 ++++++++++--------- apis/dataset_metadata/dataset_access.py | 8 +-- .../dataset_alternate_identifier.py | 24 ++++--- apis/dataset_metadata/dataset_consent.py | 8 +-- apis/dataset_metadata/dataset_date.py | 8 +-- .../dataset_de_ident_level.py | 8 +-- apis/dataset_metadata/dataset_description.py | 20 +++--- apis/dataset_metadata/dataset_funder.py | 16 ++--- .../dataset_managing_organization.py | 8 +-- 
apis/dataset_metadata/dataset_other.py | 8 +-- apis/dataset_metadata/dataset_readme.py | 8 +-- apis/dataset_metadata/dataset_record_keys.py | 8 +-- apis/dataset_metadata/dataset_related_item.py | 16 ++--- .../dataset_related_item_contributor.py | 4 +- .../dataset_related_item_identifier.py | 4 +- .../dataset_related_item_other.py | 4 +- .../dataset_related_item_title.py | 4 +- apis/dataset_metadata/dataset_rights.py | 16 ++--- apis/dataset_metadata/dataset_subject.py | 16 ++--- apis/dataset_metadata/dataset_title.py | 20 +++--- apis/participant.py | 22 +++---- apis/study.py | 42 ++++++------ apis/study_metadata/study_arm.py | 28 ++++---- apis/study_metadata/study_available_ipd.py | 24 +++---- apis/study_metadata/study_contact.py | 24 +++---- apis/study_metadata/study_description.py | 10 +-- apis/study_metadata/study_design.py | 10 +-- apis/study_metadata/study_eligibility.py | 8 +-- apis/study_metadata/study_identification.py | 34 +++++----- apis/study_metadata/study_intervention.py | 22 +++---- apis/study_metadata/study_ipdsharing.py | 8 +-- apis/study_metadata/study_link.py | 22 +++---- apis/study_metadata/study_location.py | 22 +++---- apis/study_metadata/study_other.py | 20 +++--- apis/study_metadata/study_overall_official.py | 24 +++---- apis/study_metadata/study_reference.py | 22 +++---- .../study_sponsors_collaborators.py | 14 ++-- apis/study_metadata/study_status.py | 10 +-- apis/user.py | 8 +-- model/study.py | 2 +- model/study_metadata/identifiers.py | 3 +- 43 files changed, 398 insertions(+), 342 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index ba9e7847..7bebacc9 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -35,7 +35,7 @@ from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd from .study_metadata.study_contact import api as contact -from .study_metadata.study_description import api as description +from .study_metadata.study_description import api as 
study_description from .study_metadata.study_design import api as design from .study_metadata.study_eligibility import api as eligibility from .study_metadata.study_identification import api as identification @@ -56,6 +56,53 @@ doc="/docs", ) +__all__ = [ + "dataset_metadata_namespace", + "study_metadata_namespace", + "authentication", + "contributors_api", + "dataset_api", + "access", + "alternate_identifier", + "consent", + "date", + "de_ident_level", + "description", + "funder", + "managing_organization", + "dataset_other", + "readme", + "record_keys", + "related_item", + "related_item_contributor", + "related_item_identifier", + "api", + "related_item_other", + "related_item_title", + "rights", + "subject", + "title", + "participants_api", + "study_api", + "arm", + "available_ipd", + "contact", + "design", + "eligibility", + "intervention", + "ipdsharing", + "link", + "location", + "other", + "overall_official", + "reference", + "sponsors_collaborator", + "status", + "user", + "identification", + "study_description", +] + api.add_namespace(dataset_metadata_namespace) api.add_namespace(study_metadata_namespace) diff --git a/apis/authentication.py b/apis/authentication.py index 0058b004..8f313ce4 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -8,14 +8,7 @@ from flask_restx import Namespace, Resource, fields import config -from model import ( - Study, - StudyContributor, - StudyInvitedContributor, - TokenBlacklist, - User, - db, -) +import model api = Namespace("Authentication", description="Authentication paths", path="/") @@ -53,18 +46,20 @@ def post(self): pattern = r"^[\w\.-]+@[\w\.-]+\.\w+$" if not data["email_address"] or not re.match(pattern, data["email_address"]): return "Email address is invalid", 422 - user = User.query.filter_by(email_address=data["email_address"]).one_or_none() + user = model.User.query.filter_by( + email_address=data["email_address"] + ).one_or_none() if user: return "This email address is already in use", 409 - 
invitations = StudyInvitedContributor.query.filter_by( + invitations = model.StudyInvitedContributor.query.filter_by( email_address=data["email_address"] ).all() - new_user = User.from_data(data) + new_user = model.User.from_data(data) for invite in invitations: invite.study.add_user_to_study(new_user, invite.permission) - db.session.delete(invite) - db.session.add(new_user) - db.session.commit() + model.db.session.delete(invite) + model.db.session.add(new_user) + model.db.session.commit() return f"Hi, {new_user.email_address}, you have successfully signed up", 201 @@ -79,7 +74,7 @@ def post(self): Also, it sets token for logged user along with expiration date""" data = request.json email_address = data["email_address"] - user = User.query.filter_by(email_address=email_address).one_or_none() + user = model.User.query.filter_by(email_address=email_address).one_or_none() if not user: return "Invalid credentials", 401 validate_pass = user.check_password(data["password"]) @@ -91,7 +86,8 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=200), + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=200), "jti": str(uuid.uuid4()), }, config.secret, @@ -117,10 +113,10 @@ def authentication(): decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: return - token_blacklist = TokenBlacklist.query.get(decoded["jti"]) + token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) if token_blacklist: return - user = User.query.get(decoded["user"]) + user = model.User.query.get(decoded["user"]) g.user = user @@ -145,8 +141,8 @@ def authorization(): def is_granted(permission: str, study=None): """filters users and checks whether current permission equal to passed permission""" - contributor = StudyContributor.query.filter( - StudyContributor.user == g.user, StudyContributor.study == study + contributor = 
model.StudyContributor.query.filter( + model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() if not contributor: return False @@ -204,7 +200,7 @@ def is_granted(permission: str, study=None): def is_study_metadata(study_id: int): - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/dataset.py b/apis/dataset.py index 2bbe7059..fb4980ad 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,7 +1,7 @@ from flask import Response, jsonify, request from flask_restx import Namespace, Resource, fields -from model import Dataset, Participant, Study, Version, db +import model from .authentication import is_granted @@ -40,8 +40,8 @@ class DatasetList(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset) def get(self, study_id): - study = Study.query.get(study_id) - datasets = Dataset.query.filter_by(study=study) + study = model.Study.query.get(study_id) + datasets = model.Dataset.query.filter_by(study=study) return [d.to_dict() for d in datasets] @api.response(201, "Success") @@ -49,13 +49,13 @@ def get(self, study_id): @api.doc("update dataset") @api.expect(dataset) def post(self, study_id): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 # todo if study.participant id== different study Throw error - dataset_ = Dataset.from_data(study, request.json) - db.session.add(dataset_) - db.session.commit() + dataset_ = model.Dataset.from_data(study, request.json) + model.db.session.add(dataset_) + model.db.session.commit() return dataset_.to_dict() @@ -67,32 +67,32 @@ class DatasetResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") def get(self, study_id, dataset_id): - data_obj = Dataset.query.get(dataset_id) + data_obj = 
model.Dataset.query.get(dataset_id) return data_obj.to_dict() @api.response(201, "Success") @api.response(400, "Validation Error") def put(self, study_id, dataset_id): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("update_dataset", study): return "Access denied, you can not modify", 403 data = request.json - data_obj = Dataset.query.get(dataset_id) + data_obj = model.Dataset.query.get(dataset_id) data_obj.update(data) - db.session.commit() + model.db.session.commit() return data_obj.to_dict() @api.response(201, "Success") @api.response(400, "Validation Error") def delete(self, study_id, dataset_id): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 - data_obj = Dataset.query.get(dataset_id) + data_obj = model.Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: - db.session.delete(version) - db.session.delete(data_obj) - db.session.commit() + model.db.session.delete(version) + model.db.session.delete(data_obj) + model.db.session.commit() dataset_ = study.dataset return [d.to_dict() for d in dataset_], 201 @@ -113,28 +113,28 @@ class VersionResource(Resource): @api.doc("dataset version") @api.marshal_with(dataset_versions_model) def get(self, study_id, dataset_id, version_id): - dataset_version = Version.query.get(version_id) + dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict() def put(self, study_id, dataset_id, version_id): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("publish_dataset", study): return "Access denied, you can not modify", 403 - data_version_obj = Version.query.get(version_id) + data_version_obj = model.Version.query.get(version_id) data_version_obj.update(request.json) - db.session.commit() + model.db.session.commit() return jsonify(data_version_obj.to_dict()), 201 def 
delete(self, study_id, dataset_id, version_id): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 - data_obj = Dataset.query.get(dataset_id) + data_obj = model.Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: - db.session.delete(version) - db.session.commit() - db.session.delete(data_obj) - db.session.commit() + model.db.session.delete(version) + model.db.session.commit() + model.db.session.delete(data_obj) + model.db.session.commit() return Response(status=204) @@ -143,13 +143,15 @@ def delete(self, study_id, dataset_id, version_id): @api.response(400, "Validation Error") class VersionList(Resource): def post(self, study_id: int, dataset_id: int): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("publish_version", study): return "Access denied, you can not modify", 403 data = request.json - data["participants"] = [Participant.query.get(i) for i in data["participants"]] - data_obj = Dataset.query.get(dataset_id) - dataset_versions = Version.from_data(data_obj, data) - db.session.add(dataset_versions) - db.session.commit() + data["participants"] = [ + model.Participant.query.get(i) for i in data["participants"] + ] + data_obj = model.Dataset.query.get(dataset_id) + dataset_versions = model.Version.from_data(data_obj, data) + model.db.session.add(dataset_versions) + model.db.session.commit() return jsonify(dataset_versions.to_dict()) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index 2157533d..3b9c7da2 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db dataset_access = api.model( "DatasetAccess", @@ -23,12 
+23,12 @@ class DatasetAccessResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_access) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_access_ = dataset_.dataset_access return [d.to_dict() for d in dataset_access_] def put(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_access_ = dataset_.dataset_access.update(request.json) - db.session.commit() + model.db.session.commit() return dataset_access_.to_dict() diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index b7dfae25..d5d6c5b2 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetAlternateIdentifier, db dataset_identifier = api.model( "DatasetAlternateIdentifier", @@ -22,26 +22,30 @@ class DatasetAlternateIdentifierResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_identifier) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_identifier_ = dataset_.dataset_alternate_identifier return [d.to_dict() for d in dataset_identifier_] def post(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) + data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: if "id" in i and i["id"]: - dataset_identifier_ = DatasetAlternateIdentifier.query.get(i["id"]) + dataset_identifier_ = model.DatasetAlternateIdentifier.query.get( + i["id"] + ) if not dataset_identifier_: return f"Study link 
{i['id']} Id is not found", 404 dataset_identifier_.update(i) list_of_elements.append(dataset_identifier_.to_dict()) elif "id" not in i or not i["id"]: - dataset_identifier_ = DatasetAlternateIdentifier.from_data(data_obj, i) - db.session.add(dataset_identifier_) + dataset_identifier_ = model.DatasetAlternateIdentifier.from_data( + data_obj, i + ) + model.db.session.add(dataset_identifier_) list_of_elements.append(dataset_identifier_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @api.route( @@ -49,7 +53,9 @@ def post(self, study_id: int, dataset_id: int): ) class DatasetAlternateIdentifierUpdate(Resource): def put(self, study_id: int, dataset_id: int, identifier_id: int): - dataset_identifier_ = DatasetAlternateIdentifier.query.get(identifier_id) + dataset_identifier_ = model.DatasetAlternateIdentifier.query.get( + identifier_id + ) dataset_identifier_.update(request.json) - db.session.commit() + model.db.session.commit() return dataset_identifier_.to_dict() diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index b63e63d7..fbea4753 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db dataset_consent = api.model( "DatasetConsent", @@ -26,13 +26,13 @@ class DatasetConsentResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_consent) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_consent_ = dataset_.dataset_consent return [d.to_dict() for d in dataset_consent_] def put(self, study_id: int, dataset_id: int): data = request.json - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_consent_ = 
dataset_.dataset_consent.update(data) - db.session.commit() + model.db.session.commit() return dataset_consent_.to_dict() diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index fc5b9a92..ab2b9550 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db dataset_date = api.model( "DatasetDate", @@ -22,13 +22,13 @@ class DatasetDateResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_date) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_date_ = dataset_.dataset_date return [d.to_dict() for d in dataset_date_] def put(self, study_id: int, dataset_id: int): data = request.json - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_date_ = dataset_.dataset_date.update(data) - db.session.commit() + model.db.session.commit() return dataset_date_.to_dict() diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 5fcb0898..3d38170f 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db de_ident_level = api.model( "DatasetDeIdentLevel", @@ -26,13 +26,13 @@ class DatasetDeIdentLevelResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(de_ident_level) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) de_ident_level_ = dataset_.dataset_de_ident_level 
return [d.to_dict() for d in de_ident_level_] def put(self, study_id: int, dataset_id: int): data = request.json - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) de_ident_level_ = dataset_.dataset_de_ident_level.update(data) - db.session.commit() + model.db.session.commit() return de_ident_level_.to_dict() diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 1270d550..46253c9a 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetDescription, db dataset_description = api.model( "DatasetDescription", @@ -21,24 +21,24 @@ class DatasetDescriptionResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_description) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_description_ = dataset_.dataset_description return [d.to_dict() for d in dataset_description_] def post(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) + data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: if "id" in i and i["id"]: - dataset_description_ = DatasetDescription.query.get(i["id"]) + dataset_description_ = model.DatasetDescription.query.get(i["id"]) dataset_description_.update(i) list_of_elements.append(dataset_description_.to_dict()) elif "id" not in i or not i["id"]: - dataset_description_ = DatasetDescription.from_data(data_obj, i) - db.session.add(dataset_description_) + dataset_description_ = model.DatasetDescription.from_data(data_obj, i) + model.db.session.add(dataset_description_) list_of_elements.append(dataset_description_.to_dict()) - 
db.session.commit() + model.db.session.commit() return list_of_elements @api.route( @@ -46,7 +46,7 @@ def post(self, study_id: int, dataset_id: int): ) class DatasetDescriptionUpdate(Resource): def delete(self, study_id: int, dataset_id: int, description_id: int): - dataset_description_ = DatasetDescription.query.get(description_id) - db.session.delete(dataset_description_) - db.session.commit() + dataset_description_ = model.DatasetDescription.query.get(description_id) + model.db.session.delete(dataset_description_) + model.db.session.commit() return dataset_description_.to_dict() diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index cba5437e..03a840af 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetFunder, db dataset_funder = api.model( "DatasetFunder", @@ -26,23 +26,23 @@ class DatasetFunderResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_funder) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_funder_ = dataset_.dataset_funder return [d.to_dict() for d in dataset_funder_] def post(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_funder_ = DatasetFunder.from_data(data_obj, data) - db.session.add(dataset_funder_) - db.session.commit() + data_obj = model.Dataset.query.get(dataset_id) + dataset_funder_ = model.DatasetFunder.from_data(data_obj, data) + model.db.session.add(dataset_funder_) + model.db.session.commit() return dataset_funder_.to_dict() @api.route("/study//dataset//metadata/funder/") class DatasetFunderUpdate(Resource): def put(self, study_id: int, dataset_id: int, funder_id: int): - 
dataset_funder_ = DatasetFunder.query.get(funder_id) + dataset_funder_ = model.DatasetFunder.query.get(funder_id) dataset_funder_.update(request.json) - db.session.commit() + model.db.session.commit() return dataset_funder_.to_dict() diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 74d7b9d7..4a9f778b 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db managing_organization = api.model( "DatasetManagingOrganization", @@ -21,13 +21,13 @@ class DatasetManagingOrganizationResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(managing_organization) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) managing_organization_ = dataset_.dataset_managing_organization return [d.to_dict() for d in managing_organization_] def put(self, study_id: int, dataset_id: int): data = request.json - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) managing_organization_ = dataset_.dataset_managing_organization.update(data) - db.session.commit() + model.db.session.commit() return managing_organization_.to_dict() diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 8b304ad6..c93ac970 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db dataset_other = api.model( "DatasetOther", @@ -25,13 +25,13 @@ class DatasetOtherResource(Resource): 
@api.response(400, "Validation Error") @api.marshal_with(dataset_other) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other return [d.to_dict() for d in dataset_other_] def put(self, study_id: int, dataset_id: int): data = request.json - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other.update(data) - db.session.commit() + model.db.session.commit() return dataset_other_.to_dict() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index 043cd764..b53f43c7 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db dataset_readme = api.model( "DatasetReadme", @@ -17,13 +17,13 @@ class DatasetReadmeResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_readme) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_readme_ = dataset_.dataset_readme return [d.to_dict() for d in dataset_readme_] def put(self, study_id: int, dataset_id: int): data = request.json - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_readme_ = dataset_.dataset_readme.update(data) - db.session.commit() + model.db.session.commit() return dataset_readme_.to_dict() diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 1467ad53..a440111b 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import 
Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, db dataset_record_keys = api.model( "DatasetRecordKeys", @@ -21,13 +21,13 @@ class DatasetRecordKeysResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_record_keys) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_record_keys_ = dataset_.dataset_record_keys return [d.to_dict() for d in dataset_record_keys_] def put(self, study_id: int, dataset_id: int): data = request.json - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_record_keys_ = dataset_.dataset_de_ident_level.update(data) - db.session.commit() + model.db.session.commit() return dataset_record_keys_.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 7a70283a..2aebf8a9 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetRelatedItem, db dataset_related_item = api.model( "DatasetRelatedItem", @@ -21,16 +21,16 @@ class DatasetRelatedItemResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(dataset_related_item) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_ = dataset_.dataset_related_item return [d.to_dict() for d in dataset_related_item_] def post(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_related_item_ = DatasetRelatedItem.from_data(data_obj, data) - db.session.add(dataset_related_item_) - db.session.commit() + data_obj 
= model.Dataset.query.get(dataset_id) + dataset_related_item_ = model.DatasetRelatedItem.from_data(data_obj, data) + model.db.session.add(dataset_related_item_) + model.db.session.commit() return dataset_related_item_.to_dict() @api.route( @@ -39,7 +39,7 @@ def post(self, study_id: int, dataset_id: int): class DatasetRelatedItemUpdate(Resource): def put(self, study_id: int, dataset_id: int, related_item_id: int): data = request.json - dataset_related_item_ = DatasetRelatedItem.query.get(related_item_id) + dataset_related_item_ = model.DatasetRelatedItem.query.get(related_item_id) dataset_related_item_.update(data) - db.session.commit() + model.db.session.commit() return dataset_related_item_.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py index 96def854..d7d2a09a 100644 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ b/apis/dataset_metadata/dataset_related_item_contributor.py @@ -1,7 +1,7 @@ from flask_restx import Resource +import model from apis.dataset_metadata_namespace import api -from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", @@ -21,6 +21,6 @@ class DatasetRelatedItemContributorResource(Resource): @api.response(400, "Validation Error") # @api.marshal_with(dataset_related_item_contributor) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_contributor_ = dataset_.dataset_related_item_contributor return [d.to_dict() for d in dataset_related_item_contributor_] diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py index 17e54db3..d1f6695b 100644 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ b/apis/dataset_metadata/dataset_related_item_identifier.py @@ -1,7 +1,7 @@ from flask_restx import 
Resource +import model from apis.dataset_metadata_namespace import api -from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", @@ -21,6 +21,6 @@ class DatasetRelatedItemContributorResource(Resource): @api.response(400, "Validation Error") # @api.marshal_with(dataset_related_item_contributor) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_ = dataset_.dataset_related_item return [d.to_dict() for d in dataset_related_item_] diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py index be39a9e7..621a4920 100644 --- a/apis/dataset_metadata/dataset_related_item_other.py +++ b/apis/dataset_metadata/dataset_related_item_other.py @@ -1,7 +1,7 @@ from flask_restx import Resource +import model from apis.dataset_metadata_namespace import api -from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemContributor", @@ -22,6 +22,6 @@ class DatasetRelatedItemContributorResource(Resource): # @api.param("id", "The dataset identifier") # @api.marshal_with(dataset_related_item_contributor) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_ = dataset_.dataset_related_item return [d.to_dict() for d in dataset_related_item_] diff --git a/apis/dataset_metadata/dataset_related_item_title.py b/apis/dataset_metadata/dataset_related_item_title.py index f6c00841..1bc6d243 100644 --- a/apis/dataset_metadata/dataset_related_item_title.py +++ b/apis/dataset_metadata/dataset_related_item_title.py @@ -1,7 +1,7 @@ from flask_restx import Resource +import model from apis.dataset_metadata_namespace import api -from model import Dataset # dataset_related_item_contributor = api.model( # "DatasetRelatedItemTitle", @@ -22,6 +22,6 @@ 
class DatasetRelatedItemTitleResource(Resource): # @api.param("id", "The dataset identifier") # @api.marshal_with(dataset_related_item_contributor) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_title_ = dataset_.dataset_related_item_title return [d.to_dict() for d in dataset_related_item_title_] diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 1a00e76b..06b12136 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetRights, db dataset_rights = api.model( "DatasetRights", @@ -24,22 +24,22 @@ class DatasetRightsResource(Resource): # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_rights) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_rights_ = dataset_.dataset_rights return [d.to_dict() for d in dataset_rights_] def post(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_rights_ = DatasetRights.from_data(data_obj, data) - db.session.add(dataset_rights_) - db.session.commit() + data_obj = model.Dataset.query.get(dataset_id) + dataset_rights_ = model.DatasetRights.from_data(data_obj, data) + model.db.session.add(dataset_rights_) + model.db.session.commit() return dataset_rights_.to_dict() @api.route("/study//dataset//metadata/rights/") class DatasetRightsUpdate(Resource): def put(self, study_id: int, dataset_id: int, rights_id: int): - dataset_rights_ = DatasetRights.query.get(rights_id) + dataset_rights_ = model.DatasetRights.query.get(rights_id) dataset_rights_.update(request.json) - db.session.commit() + 
model.db.session.commit() return dataset_rights_.to_dict() diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index a1b4b680..e9cc567d 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -1,8 +1,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetSubject, db dataset_subject = api.model( "DatasetSubject", @@ -25,22 +25,22 @@ class DatasetSubjectResource(Resource): # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_subject) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_subject_ = dataset_.dataset_subject return [d.to_dict() for d in dataset_subject_] def post(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) - dataset_subject_ = DatasetSubject.from_data(data_obj, data) - db.session.add(dataset_subject_) - db.session.commit() + data_obj = model.Dataset.query.get(dataset_id) + dataset_subject_ = model.DatasetSubject.from_data(data_obj, data) + model.db.session.add(dataset_subject_) + model.db.session.commit() return dataset_subject_.to_dict() @api.route("/study//dataset//metadata/subject/") class DatasetSubjectUpdate(Resource): def put(self, study_id: int, dataset_id: int, subject_id: int): - dataset_subject_ = DatasetSubject.query.get(subject_id) + dataset_subject_ = model.DatasetSubject.query.get(subject_id) dataset_subject_.update(request.json) - db.session.commit() + model.db.session.commit() return dataset_subject_.to_dict() diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index dcd885d1..6b394cb0 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,8 +1,8 @@ from flask import request from 
flask_restx import Resource, fields +import model from apis.dataset_metadata_namespace import api -from model import Dataset, DatasetTitle, db dataset_title = api.model( "DatasetTitle", @@ -22,30 +22,30 @@ class DatasetTitleResource(Resource): # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_title) def get(self, study_id: int, dataset_id: int): - dataset_ = Dataset.query.get(dataset_id) + dataset_ = model.Dataset.query.get(dataset_id) dataset_title_ = dataset_.dataset_title return [d.to_dict() for d in dataset_title_] def post(self, study_id: int, dataset_id: int): data = request.json - data_obj = Dataset.query.get(dataset_id) + data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: if "id" in i and i["id"]: - dataset_title_ = DatasetTitle.query.get(i["id"]) + dataset_title_ = model.DatasetTitle.query.get(i["id"]) dataset_title_.update(i) list_of_elements.append(dataset_title_.to_dict()) elif "id" not in i or not i["id"]: - dataset_title_ = DatasetTitle.from_data(data_obj, i) - db.session.add(dataset_title_) + dataset_title_ = model.DatasetTitle.from_data(data_obj, i) + model.db.session.add(dataset_title_) list_of_elements.append(dataset_title_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @api.route("/study//dataset//metadata/title/") class DatasetDescriptionUpdate(Resource): def delete(self, study_id: int, dataset_id: int, title_id: int): - dataset_title_ = DatasetTitle.query.get(title_id) - db.session.delete(dataset_title_) - db.session.commit() + dataset_title_ = model.DatasetTitle.query.get(title_id) + model.db.session.delete(dataset_title_) + model.db.session.commit() return dataset_title_.to_dict() diff --git a/apis/participant.py b/apis/participant.py index 2e526563..e55cf730 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -1,7 +1,7 @@ from flask import Response, request from flask_restx import Namespace, Resource, fields -from model import Participant, 
Study, db +import model from .authentication import is_granted @@ -28,7 +28,7 @@ class AddParticipant(Resource): @api.response(400, "Validation Error") @api.marshal_with(participant_model) def get(self, study_id: int): - participants = Participant.query.all() + participants = model.Participant.query.all() return [p.to_dict() for p in participants] @api.response(200, "Success") @@ -37,10 +37,10 @@ def get(self, study_id: int): def post(self, study_id: int): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 - study = Study.query.get(study_id) - add_participant = Participant.from_data(request.json, study) - db.session.add(add_participant) - db.session.commit() + study = model.Study.query.get(study_id) + add_participant = model.Participant.from_data(request.json, study) + model.db.session.add(add_participant) + model.db.session.commit() return add_participant.to_dict(), 201 @@ -54,9 +54,9 @@ def put(self, study_id, participant_id: int): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 is_granted("viewer", study_id) - update_participant = Participant.query.get(participant_id) + update_participant = model.Participant.query.get(participant_id) update_participant.update(request.json) - db.session.commit() + model.db.session.commit() return update_participant.to_dict() @api.response(200, "Success") @@ -66,7 +66,7 @@ def delete(self, study_id, participant_id: int): return "Access denied, you can not modify", 403 is_granted("viewer", study_id) - delete_participant = Participant.query.get(participant_id) - db.session.delete(delete_participant) - db.session.commit() + delete_participant = model.Participant.query.get(participant_id) + model.db.session.delete(delete_participant) + model.db.session.commit() return Response(status=204) diff --git a/apis/study.py b/apis/study.py index 6f0daa44..85d5ff4b 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,7 +1,7 @@ from flask import g, request from flask_restx import 
Namespace, Resource, fields -from model import Study, StudyContributor, User, db +import model from .authentication import is_granted @@ -29,25 +29,25 @@ def get(self): # Study.study_contributors.any(User.id == g.user.id) # ).all() # studies = Study.query.filter(User.id == g.user.id).all() - study_contributors = StudyContributor.query.filter( - StudyContributor.user_id == g.user.id + study_contributors = model.StudyContributor.query.filter( + model.StudyContributor.user_id == g.user.id ).all() # Filter contributors where user_id matches the user's id study_ids = [contributor.study_id for contributor in study_contributors] - studies = Study.query.filter(Study.id.in_(study_ids)).all() + studies = model.Study.query.filter(model.Study.id.in_(study_ids)).all() return [s.to_dict() for s in studies] @api.expect(study_model) @api.response(200, "Success") @api.response(400, "Validation Error") def post(self): - add_study = Study.from_data(request.json) - db.session.add(add_study) + add_study = model.Study.from_data(request.json) + model.db.session.add(add_study) study_id = add_study.id - study_ = Study.query.get(study_id) - study_contributor = StudyContributor.from_data(study_, g.user, "owner") - db.session.add(study_contributor) - db.session.commit() + study_ = model.Study.query.get(study_id) + study_contributor = model.StudyContributor.from_data(study_, g.user, "owner") + model.db.session.add(study_contributor) + model.db.session.commit() return study_.to_dict() @@ -58,25 +58,25 @@ class StudyResource(Resource): @api.response(400, "Validation Error") # @api.marshal_with(study) def get(self, study_id: int): - study1 = Study.query.get(study_id) + study1 = model.Study.query.get(study_id) return study1.to_dict() @api.expect(study_model) @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int): - update_study = Study.query.get(study_id) + update_study = model.Study.query.get(study_id) if not is_granted("update_study", update_study): 
return "Access denied, you can not modify", 403 update_study.update(request.json) - db.session.commit() + model.db.session.commit() return update_study.to_dict() @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("delete_study", study): return "Access denied, you can not delete study", 403 for d in study.dataset: @@ -84,13 +84,13 @@ def delete(self, study_id: int): version.participants.clear() for d in study.dataset: for version in d.dataset_versions: - db.session.delete(version) - db.session.delete(d) + model.db.session.delete(version) + model.db.session.delete(d) for p in study.participants: - db.session.delete(p) - db.session.delete(study) - db.session.commit() - studies = Study.query.filter( - Study.study_contributors.any(User.id == g.user.id) + model.db.session.delete(p) + model.db.session.delete(study) + model.db.session.commit() + studies = model.Study.query.filter( + model.Study.study_contributors.any(model.User.id == g.user.id) ).all() return [s.to_dict() for s in studies], 201 diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 3e33cacc..568ee24e 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Arm, Study, StudyArm, db from ..authentication import is_granted @@ -36,30 +36,30 @@ class StudyArmResource(Resource): # @api.marshal_with(study_arm) def get(self, study_id): """Get study arm metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) - arm = Arm(study_) + arm = model.Arm(study_) return arm.to_dict() def post(self, study_id): """Create study arm metadata""" - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not 
is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 data = request.json - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) for i in data: if "id" in i and i["id"]: - study_arm_ = StudyArm.query.get(i["id"]) + study_arm_ = model.StudyArm.query.get(i["id"]) study_arm_.update(i) elif "id" not in i or not i["id"]: - study_arm_ = StudyArm.from_data(study_obj, i) - db.session.add(study_arm_) + study_arm_ = model.StudyArm.from_data(study_obj, i) + model.db.session.add(study_arm_) - db.session.commit() + model.db.session.commit() - arms = Arm(study_obj) + arms = model.Arm(study_obj) return arms.to_dict() @@ -70,11 +70,11 @@ class StudyArmUpdate(Resource): def delete(self, study_id: int, arm_id: int): """Delete study arm metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_arm_ = StudyArm.query.get(arm_id) - db.session.delete(study_arm_) - db.session.commit() + study_arm_ = model.StudyArm.query.get(arm_id) + model.db.session.delete(study_arm_) + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index f0768430..98415a38 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, StudyAvailableIpd, db from ..authentication import is_granted @@ -29,7 +29,7 @@ class StudyAvailableResource(Resource): # @api.marshal_with(study_available) def get(self, study_id: int): """Get study available metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_available_ipd_ = study_.study_available_ipd @@ -45,26 +45,26 @@ def 
get(self, study_id: int): @api.marshal_with(study_available) def post(self, study_id: int): """Create study available metadata""" - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 data = request.json - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) list_of_elements = [] for i in data: if "id" in i and i["id"]: - study_available_ipd_ = StudyAvailableIpd.query.get(i["id"]) + study_available_ipd_ = model.StudyAvailableIpd.query.get(i["id"]) study_available_ipd_.update(i) list_of_elements.append(study_available_ipd_.to_dict()) elif "id" not in i or not i["id"]: - study_available_ipd_ = StudyAvailableIpd.from_data(study_obj, i) - db.session.add(study_available_ipd_) + study_available_ipd_ = model.StudyAvailableIpd.from_data(study_obj, i) + model.db.session.add(study_available_ipd_) list_of_elements.append(study_available_ipd_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @@ -75,12 +75,12 @@ class StudyLocationUpdate(Resource): def delete(self, study_id: int, available_ipd_id: int): """Delete study available metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_available_ = StudyAvailableIpd.query.get(available_ipd_id) + study_available_ = model.StudyAvailableIpd.query.get(available_ipd_id) - db.session.delete(study_available_) - db.session.commit() + model.db.session.delete(study_available_) + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 20cf6411..65e91721 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields 
+import model from apis.study_metadata_namespace import api -from model import Study, StudyContact, db from ..authentication import is_granted @@ -32,7 +32,7 @@ class StudyContactResource(Resource): @api.marshal_with(study_contact) def get(self, study_id: int): """Get study contact metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_contact_ = study_.study_contact @@ -42,26 +42,26 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study contact metadata""" - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 data = request.json - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) list_of_elements = [] for i in data: if "id" in i and i["id"]: - study_contact_ = StudyContact.query.get(i["id"]) + study_contact_ = model.StudyContact.query.get(i["id"]) study_contact_.update(i) list_of_elements.append(study_contact_.to_dict()) elif "id" not in i or not i["id"]: - study_contact_ = StudyContact.from_data(study_obj, i, None, True) - db.session.add(study_contact_) + study_contact_ = model.StudyContact.from_data(study_obj, i, None, True) + model.db.session.add(study_contact_) list_of_elements.append(study_contact_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @@ -70,13 +70,13 @@ class StudyContactUpdate(Resource): """Study Contact Metadata""" def delete(self, study_id: int, central_contact_id: int): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 """Delete study contact metadata""" - study_contact_ = StudyContact.query.get(central_contact_id) + study_contact_ = model.StudyContact.query.get(central_contact_id) - db.session.delete(study_contact_) - db.session.commit() + 
model.db.session.delete(study_contact_) + model.db.session.commit() return study_contact_.to_dict() diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 9e9cea8e..47009f79 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, db from ..authentication import is_granted @@ -27,7 +27,7 @@ class StudyDescriptionResource(Resource): @api.marshal_with(study_description) def get(self, study_id: int): """Get study description metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_description_ = study_.study_description @@ -35,13 +35,13 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study description metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_.study_description.update(request.json) - db.session.commit() + model.db.session.commit() return study_.study_description.to_dict() diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 04098c82..fe3fb0f9 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, db from ..authentication import is_granted @@ -43,21 +43,21 @@ class StudyDesignResource(Resource): @api.marshal_with(study_design) def get(self, study_id: int): """Get study design metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) 
study_design_ = study_.study_design return study_design_.to_dict() def put(self, study_id: int): - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 """Update study design metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_.study_design.update(request.json) - db.session.commit() + model.db.session.commit() return study_.study_design.to_dict() diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index e1806f32..e8d03b3e 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, db from ..authentication import is_granted @@ -39,18 +39,18 @@ class StudyEligibilityResource(Resource): @api.marshal_with(study_eligibility) def get(self, study_id: int): """Get study eligibility metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) return study_.study_eligibility.to_dict() def put(self, study_id: int): """Update study eligibility metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) if not is_granted("study_metadata", study_): return "Access denied, you can not delete study", 403 study_.study_eligibility.update(request.json) - db.session.commit() + model.db.session.commit() return study_.study_eligibility.to_dict() diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 0fc492a1..37e5d86a 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import 
api -from model import Identifiers, Study, StudyIdentification, db from ..authentication import is_granted @@ -31,8 +31,8 @@ class StudyIdentificationResource(Resource): # @api.marshal_with(study_identification) def get(self, study_id: int): """Get study identification metadata""" - study_ = Study.query.get(study_id) - identifiers = Identifiers(study_) + study_ = model.Study.query.get(study_id) + identifiers = model.Identifiers(study_) return identifiers.to_dict() @api.doc("identification add") @@ -43,36 +43,36 @@ def post(self, study_id: int): """Create study identification metadata""" data = request.json - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 primary = data["primary"] primary["secondary"] = False if "id" in primary and primary["id"]: - study_identification_ = StudyIdentification.query.get(primary["id"]) + study_identification_ = model.StudyIdentification.query.get(primary["id"]) study_identification_.update(primary) elif "id" not in primary or not primary["id"]: - study_identification_ = StudyIdentification.from_data( + study_identification_ = model.StudyIdentification.from_data( study_obj, primary, False ) - db.session.add(study_identification_) + model.db.session.add(study_identification_) for i in data["secondary"]: i["secondary"] = True if "id" in i and i["id"]: - study_identification_ = StudyIdentification.query.get(i["id"]) + study_identification_ = model.StudyIdentification.query.get(i["id"]) study_identification_.update(i) elif "id" not in i or not i["id"]: - study_identification_ = StudyIdentification.from_data( + study_identification_ = model.StudyIdentification.from_data( study_obj, i, True ) - db.session.add(study_identification_) + model.db.session.add(study_identification_) - db.session.commit() + model.db.session.commit() - identifiers = Identifiers(study_obj) + identifiers = 
model.Identifiers(study_obj) return identifiers.to_dict() @@ -82,15 +82,17 @@ class StudyIdentificationdUpdate(Resource): def delete(self, study_id: int, identification_id: int): """Delete study identification metadata""" - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - study_identification_ = StudyIdentification.query.get(identification_id) + study_identification_ = model.StudyIdentification.query.get( + identification_id + ) if not study_identification_.secondary: return 400, "primary identifier can not be deleted" - db.session.delete(study_identification_) - db.session.commit() + model.db.session.delete(study_identification_) + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 359a185d..90b2248e 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, StudyIntervention, db from ..authentication import is_granted @@ -31,7 +31,7 @@ class StudyInterventionResource(Resource): @api.marshal_with(study_intervention) def get(self, study_id: int): """Get study intervention metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_intervention_ = study_.study_intervention @@ -43,22 +43,22 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study intervention metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 list_of_elements = [] data = request.json for i in data: if "id" in i and i["id"]: - study_intervention_ = 
StudyIntervention.query.get(i["id"]) + study_intervention_ = model.StudyIntervention.query.get(i["id"]) study_intervention_.update(i) list_of_elements.append(study_intervention_.to_dict()) elif "id" not in i or not i["id"]: - study_intervention_ = StudyIntervention.from_data(study_obj, i) - db.session.add(study_intervention_) + study_intervention_ = model.StudyIntervention.from_data(study_obj, i) + model.db.session.add(study_intervention_) list_of_elements.append(study_intervention_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @@ -68,13 +68,13 @@ class StudyInterventionUpdate(Resource): def delete(self, study_id: int, intervention_id: int): """Delete study intervention metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_intervention_ = StudyIntervention.query.get(intervention_id) + study_intervention_ = model.StudyIntervention.query.get(intervention_id) - db.session.delete(study_intervention_) + model.db.session.delete(study_intervention_) - db.session.commit() + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 08811716..5135c905 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, db from ..authentication import is_granted @@ -32,15 +32,15 @@ class StudyIpdsharingResource(Resource): @api.marshal_with(study_ipdsharing) def get(self, study_id: int): """Get study ipdsharing metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) return study_.study_ipdsharing.to_dict() def put(self, study_id: int): """Create study ipdsharing metadata""" - 
study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) if not is_granted("study_metadata", study_): return "Access denied, you can not delete study", 403 study_.study_ipdsharing.update(request.json) - db.session.commit() + model.db.session.commit() return study_.study_ipdsharing.to_dict() diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 680e57aa..0dd0399a 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, StudyLink, db from ..authentication import is_granted @@ -28,33 +28,33 @@ class StudyLinkResource(Resource): @api.marshal_with(study_link) def get(self, study_id: int): """Get study link metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_link_ = study_.study_link sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_link_] def post(self, study_id: int): """Create study link metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 data = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: - study_link_ = StudyLink.query.get(i["id"]) + study_link_ = model.StudyLink.query.get(i["id"]) if study_link_ is None: return f"Study link {i['id']} Id is not found", 404 study_link_.update(i) list_of_elements.append(study_link_.to_dict()) elif "id" not in i or not i["id"]: - study_link_ = StudyLink.from_data(study_obj, i) - db.session.add(study_link_) + study_link_ = model.StudyLink.from_data(study_obj, i) + model.db.session.add(study_link_) list_of_elements.append(study_link_.to_dict()) - db.session.commit() + model.db.session.commit() return 
list_of_elements @@ -64,13 +64,13 @@ class StudyLinkUpdate(Resource): def delete(self, study_id: int, link_id: int): """Delete study link metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_link_ = StudyLink.query.get(link_id) + study_link_ = model.StudyLink.query.get(link_id) - db.session.delete(study_link_) + model.db.session.delete(study_link_) - db.session.commit() + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index ff6687e0..074c0379 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, StudyLocation, db from ..authentication import is_granted @@ -32,7 +32,7 @@ class StudyLocationResource(Resource): @api.marshal_with(study_location) def get(self, study_id: int): """Get study location metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_location_ = study_.study_location @@ -42,22 +42,22 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study location metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 data = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: - study_location_ = StudyLocation.query.get(i["id"]) + study_location_ = model.StudyLocation.query.get(i["id"]) study_location_.update(i) list_of_elements.append(study_location_.to_dict()) elif "id" not in i or not i["id"]: - study_location_ = StudyLocation.from_data(study_obj, i) - db.session.add(study_location_) + 
study_location_ = model.StudyLocation.from_data(study_obj, i) + model.db.session.add(study_location_) list_of_elements.append(study_location_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @@ -68,13 +68,13 @@ class StudyLocationUpdate(Resource): def delete(self, study_id: int, location_id: int): """Delete study location metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_location_ = StudyLocation.query.get(location_id) + study_location_ = model.StudyLocation.query.get(location_id) - db.session.delete(study_location_) + model.db.session.delete(study_location_) - db.session.commit() + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 9604d19b..46bdc901 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, db from ..authentication import is_granted @@ -30,7 +30,7 @@ class StudyOtherResource(Resource): @api.marshal_with(study_other) def get(self, study_id: int): """Get study other metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_other_ = study_.study_other @@ -38,11 +38,11 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study other metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_.study_other.update(request.json) - db.session.commit() + model.db.session.commit() return study_.study_other.to_dict() @@ -57,7 +57,7 @@ class StudyOversightResource(Resource): # @api.marshal_with(study_other) def get(self, study_id: int): """Get study oversight metadata""" - study_ = 
Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc @@ -65,7 +65,7 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study oversight metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 data = request.json @@ -73,7 +73,7 @@ def put(self, study_id: int): "oversight_has_dmc" ] study_obj.touch() - db.session.commit() + model.db.session.commit() return study_oversight @@ -89,7 +89,7 @@ class StudyConditionsResource(Resource): # @api.marshal_with(study_other) def get(self, study_id: int): """Get study conditions metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_other_conditions = study_.study_other.conditions @@ -98,11 +98,11 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study conditions metadata""" data = request.json - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 study_obj.study_other.conditions = data study_obj.touch() - db.session.commit() + model.db.session.commit() return study_obj.study_other.conditions diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index db42bbe3..e083c8ef 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, StudyOverallOfficial, db from ..authentication import is_granted @@ -29,7 +29,7 @@ class StudyOverallOfficialResource(Resource): # @api.marshal_with(study_overall_official) def get(self, study_id: int): 
"""Get study overall official metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_overall_official_ = study_.study_overall_official @@ -46,21 +46,23 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study overall official metadata""" data = request.json - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 list_of_elements = [] for i in data: if "id" in i and i["id"]: - study_overall_official_ = StudyOverallOfficial.query.get(i["id"]) + study_overall_official_ = model.StudyOverallOfficial.query.get(i["id"]) study_overall_official_.update(i) list_of_elements.append(study_overall_official_.to_dict()) elif "id" not in i or not i["id"]: - study_overall_official_ = StudyOverallOfficial.from_data(study_obj, i) - db.session.add(study_overall_official_) + study_overall_official_ = model.StudyOverallOfficial.from_data( + study_obj, i + ) + model.db.session.add(study_overall_official_) list_of_elements.append(study_overall_official_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @@ -70,13 +72,13 @@ class StudyOverallOfficialUpdate(Resource): @api.response(400, "Validation Error") def delete(self, study_id: int, overall_official_id: int): """Delete study overall official metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_overall_official_ = StudyOverallOfficial.query.get( + study_overall_official_ = model.StudyOverallOfficial.query.get( overall_official_id ) - db.session.delete(study_overall_official_) - db.session.commit() + model.db.session.delete(study_overall_official_) + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_reference.py 
b/apis/study_metadata/study_reference.py index 2d45c44d..6f40abb7 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, StudyReference, db from ..authentication import is_granted @@ -30,7 +30,7 @@ class StudyReferenceResource(Resource): @api.marshal_with(study_reference) def get(self, study_id: int): """Get study reference metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_reference_ = study_.study_reference @@ -43,22 +43,22 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study reference metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 data = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: - study_reference_ = StudyReference.query.get(i["id"]) + study_reference_ = model.StudyReference.query.get(i["id"]) study_reference_.update(i) list_of_elements.append(study_reference_.to_dict()) elif "id" not in i or not i["id"]: - study_reference_ = StudyReference.from_data(study_obj, i) - db.session.add(study_reference_) + study_reference_ = model.StudyReference.from_data(study_obj, i) + model.db.session.add(study_reference_) list_of_elements.append(study_reference_.to_dict()) - db.session.commit() + model.db.session.commit() return list_of_elements @@ -68,13 +68,13 @@ class StudyReferenceUpdate(Resource): def delete(self, study_id: int, reference_id: int): """Delete study reference metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study_reference_ = 
StudyReference.query.get(reference_id) + study_reference_ = model.StudyReference.query.get(reference_id) - db.session.delete(study_reference_) + model.db.session.delete(study_reference_) - db.session.commit() + model.db.session.commit() return 204 diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 5a1b82a1..46a9527c 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, db from ..authentication import is_granted @@ -38,7 +38,7 @@ class StudySponsorsResource(Resource): @api.marshal_with(study_sponsors) def get(self, study_id: int): """Get study sponsors metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_sponsors_collaborators_ = study_.study_sponsors_collaborators @@ -46,11 +46,11 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study sponsors metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_.study_sponsors_collaborators.update(request.json) - db.session.commit() + model.db.session.commit() return study_.study_sponsors_collaborators.to_dict() @@ -65,7 +65,7 @@ class StudyCollaboratorsResource(Resource): # @api.marshal_with(study_collaborators) def get(self, study_id: int): """Get study collaborators metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_collaborators_ = study_.study_sponsors_collaborators.collaborator_name @@ -76,10 +76,10 @@ def get(self, study_id: int): def put(self, study_id: int): """updating study collaborators""" data = request.json - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access 
denied, you can not delete study", 403 study_obj.study_sponsors_collaborators.collaborator_name = data study_obj.touch() - db.session.commit() + model.db.session.commit() return study_obj.study_sponsors_collaborators.collaborator_name diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index afebbb53..ef0df660 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -2,8 +2,8 @@ from flask import request from flask_restx import Resource, fields +import model from apis.study_metadata_namespace import api -from model import Study, db from ..authentication import is_granted @@ -32,7 +32,7 @@ class StudyStatusResource(Resource): @api.marshal_with(study_status) def get(self, study_id: int): """Get study status metadata""" - study_ = Study.query.get(study_id) + study_ = model.Study.query.get(study_id) study_status_ = study_.study_status @@ -40,13 +40,13 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study status metadata""" - study_obj = Study.query.get(study_id) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - study = Study.query.get(study_id) + study = model.Study.query.get(study_id) study.study_status.update(request.json) - db.session.commit() + model.db.session.commit() return study.study_status.to_dict() diff --git a/apis/user.py b/apis/user.py index 2fd8de1d..cc7a02e5 100644 --- a/apis/user.py +++ b/apis/user.py @@ -1,7 +1,7 @@ from flask import g, request from flask_restx import Namespace, Resource, fields -from model import User, db +import model api = Namespace("User", description="User tables", path="/") @@ -31,7 +31,7 @@ class UserDetailsEndpoint(Resource): @api.response(400, "Validation Error") def get(self): """Returns user details""" - user = User.query.get(g.user.id) + user = model.User.query.get(g.user.id) user_details = user.user_details user_information = 
user.to_dict() # combine user and user_details to return a single object @@ -45,11 +45,11 @@ def put(self): data = request.json if data is None: return {"message": "No data provided"}, 400 - user = User.query.get(g.user.id) + user = model.User.query.get(g.user.id) # user.update(data) # don't update the username and email_address for now user_details = user.user_details user_details.update(data) - db.session.commit() + model.db.session.commit() # combine user and user_details to return a single object user_information = user.to_dict() diff --git a/model/study.py b/model/study.py index 4ad342c0..90eee078 100644 --- a/model/study.py +++ b/model/study.py @@ -181,7 +181,7 @@ def from_data(data: dict): return study - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" if not data["title"]: raise exception.ValidationException("title is required") diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 4b66db50..fa5d4cd9 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -24,7 +24,8 @@ def to_dict(self): for identifier in sorted_study_identifications if not identifier.secondary ] - ) != 0 + ) + != 0 else [], "secondary": [ identifier.to_dict() From bad637ed5f2e030c2012d72e7143ac569a6dd203 Mon Sep 17 00:00:00 2001 From: aydawka Date: Wed, 11 Oct 2023 19:18:21 -0700 Subject: [PATCH 272/505] fix: added string types to models --- model/__init__.py | 55 ++++++++++--------- model/dataset.py | 3 + model/dataset_metadata/dataset_access.py | 9 ++- .../dataset_alternate_identifier.py | 2 +- model/dataset_metadata/dataset_consent.py | 2 +- .../dataset_contributor_affiliation.py | 2 +- model/dataset_metadata/dataset_date.py | 2 +- .../dataset_de_ident_level.py | 2 +- model/dataset_metadata/dataset_description.py | 2 +- model/dataset_metadata/dataset_funder.py | 2 +- .../dataset_managing_organization.py | 2 +- model/dataset_metadata/dataset_other.py | 2 +- 
model/dataset_metadata/dataset_readme.py | 2 +- model/dataset_metadata/dataset_record_keys.py | 2 +- .../dataset_metadata/dataset_related_item.py | 2 +- .../dataset_related_item_contributor.py | 2 +- .../dataset_related_item_other.py | 2 +- .../dataset_related_item_title.py | 2 +- model/dataset_metadata/dataset_rights.py | 2 +- model/dataset_metadata/dataset_subject.py | 2 +- model/dataset_metadata/dataset_title.py | 2 +- model/study_metadata/study_arm.py | 5 +- model/study_metadata/study_available_ipd.py | 5 +- model/study_metadata/study_contact.py | 8 +-- model/study_metadata/study_description.py | 7 ++- model/study_metadata/study_design.py | 7 ++- model/study_metadata/study_eligibility.py | 7 ++- model/study_metadata/study_identification.py | 5 +- model/study_metadata/study_intervention.py | 5 +- model/study_metadata/study_ipdsharing.py | 5 +- model/study_metadata/study_link.py | 5 +- model/study_metadata/study_location.py | 5 +- model/study_metadata/study_other.py | 5 +- .../study_metadata/study_overall_official.py | 5 +- model/study_metadata/study_reference.py | 5 +- .../study_sponsors_collaborators.py | 9 +-- model/study_metadata/study_status.py | 5 +- 37 files changed, 111 insertions(+), 85 deletions(-) diff --git a/model/__init__.py b/model/__init__.py index 83ec4d68..0bbf9903 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,17 +1,20 @@ -from model.dataset_metadata.dataset_related_item import DatasetRelatedItem -from model.dataset_metadata.dataset_related_item_contributor import ( - DatasetRelatedItemContributor, -) -from model.dataset_metadata.dataset_related_item_identifier import ( - DatasetRelatedItemIdentifier, -) -from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther -from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle +from .version import Version +from .dataset_versions import DatasetVersions +from .db import db +from .participant import Participant +from .study import 
Study, StudyException +from .user import User from .dataset import Dataset + +from .email_verification import EmailVerification +from .token_blacklist import TokenBlacklist +from .user_details import UserDetails from .dataset_contributor import DatasetContributor +from .invited_study_contributor import StudyInvitedContributor +from .study_contributor import StudyContributor + from .dataset_metadata.dataset_access import DatasetAccess -from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_consent import DatasetConsent from .dataset_metadata.dataset_contributor_affiliation import ( DatasetContributorAffiliation, @@ -20,22 +23,25 @@ from .dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder +from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_managing_organization import DatasetManagingOrganization from .dataset_metadata.dataset_other import DatasetOther from .dataset_metadata.dataset_readme import DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights -from .dataset_metadata.dataset_subject import DatasetSubject from .dataset_metadata.dataset_title import DatasetTitle -from .dataset_versions import DatasetVersions -from .db import db -from .email_verification import EmailVerification -from .invited_study_contributor import StudyInvitedContributor -from .participant import Participant -from .study import Study, StudyException -from .study_contributor import StudyContributor -from .study_metadata.arm import Arm -from .study_metadata.identifiers import Identifiers +from .dataset_metadata.dataset_subject import DatasetSubject + +from model.dataset_metadata.dataset_related_item_contributor import ( + 
DatasetRelatedItemContributor, +) +from model.dataset_metadata.dataset_related_item_identifier import ( + DatasetRelatedItemIdentifier, +) +from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther +from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle +from model.dataset_metadata.dataset_related_item import DatasetRelatedItem + from .study_metadata.study_arm import StudyArm from .study_metadata.study_available_ipd import StudyAvailableIpd from .study_metadata.study_contact import StudyContact @@ -52,10 +58,9 @@ from .study_metadata.study_reference import StudyReference from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from .study_metadata.study_status import StudyStatus -from .token_blacklist import TokenBlacklist -from .user import User -from .user_details import UserDetails -from .version import Version +from .study_metadata.identifiers import Identifiers +from .study_metadata.arm import Arm + __all__ = [ "Study", @@ -111,4 +116,4 @@ "EmailVerification", "TokenBlacklist", "UserDetails", -] +] \ No newline at end of file diff --git a/model/dataset.py b/model/dataset.py index 9adb3d29..bbf37f11 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -107,6 +107,8 @@ def last_published(self): def last_modified(self): return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() + + @staticmethod def from_data(study, data: dict): dataset_obj = Dataset(study) @@ -117,3 +119,4 @@ def update(self, data: dict): """Creates a new dataset from a dictionary""" self.updated_on = datetime.datetime.now(timezone.utc).timestamp() # self.dataset_versions = data["dataset_versions"] + diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 366aeaf7..cbd2505c 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -1,7 +1,7 @@ import uuid from ..db import db - +from model import 
Dataset class DatasetAccess(db.Model): def __init__(self, dataset): @@ -27,14 +27,17 @@ def to_dict(self): "url_last_checked": self.url_last_checked, } + @staticmethod - def from_data(dataset, data: dict): + def from_data(dataset: Dataset, data: dict): dataset_access = DatasetAccess(dataset) dataset_access.update(data) return dataset_access - def update(self, data): + def update(self, data: dict): self.description = data["description"] self.url = data["url"] self.url_last_checked = data["url_last_checked"] self.type = data["type"] + + diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index dca8470d..368ecfe2 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -29,6 +29,6 @@ def from_data(dataset, data: dict): dataset_date.update(data) return dataset_date - def update(self, data): + def update(self, data: dict): self.identifier = data["identifier"] self.identifier_type = data["identifier_type"] diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 2bf44fd9..8fc7ac45 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -40,7 +40,7 @@ def from_data(dataset, data: dict): dataset_consent.update(data) return dataset_consent - def update(self, data): + def update(self, data: dict): self.type = data["type"] self.noncommercial = data["noncommercial"] self.geog_restrict = data["geog_restrict"] diff --git a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index f76f1ac3..0362adcc 100644 --- a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -34,7 +34,7 @@ def from_data(dataset, data: dict): dataset_contributor.update(data) return dataset_contributor - def update(self, data): + 
def update(self, data: dict): self.identifier = data["identifier"] self.identifier_scheme = data["identifier_scheme"] self.identifier_scheme_uri = data["identifier_scheme_uri"] diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index 71bbb1d0..e3716cdf 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -31,7 +31,7 @@ def from_data(dataset, data: dict): dataset_date.update(data) return dataset_date - def update(self, data): + def update(self, data: dict): self.date = data["date"] self.date_type = data["date_type"] self.data_information = data["data_information"] diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index e639aff1..a83885b2 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -40,7 +40,7 @@ def from_data(dataset, data: dict): dataset_de_ident_level.update(data) return dataset_de_ident_level - def update(self, data): + def update(self, data: dict): self.type = data["type"] self.direct = data["direct"] self.hipaa = data["hipaa"] diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index 1b4f654d..c927ce62 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -29,6 +29,6 @@ def from_data(dataset, data: dict): dataset_description.update(data) return dataset_description - def update(self, data): + def update(self, data: dict): self.description = data["description"] self.description_type = data["description_type"] diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index fe71ec16..b0ed2907 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -39,7 +39,7 @@ def from_data(dataset, data: dict): dataset_funder.update(data) return 
dataset_funder - def update(self, data): + def update(self, data: dict): self.name = data["name"] self.identifier = data["identifier"] self.identifier_type = data["identifier_type"] diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py index 5fe38e99..f46532a8 100644 --- a/model/dataset_metadata/dataset_managing_organization.py +++ b/model/dataset_metadata/dataset_managing_organization.py @@ -30,6 +30,6 @@ def from_data(dataset, data: dict): dataset_managing_organization.update(data) return dataset_managing_organization - def update(self, data): + def update(self, data: dict): self.name = data["name"] self.ror_id = data["ror_id"] diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index a0607b2f..3dd99ddf 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -41,7 +41,7 @@ def from_data(dataset, data: dict): dataset_other.update(data) return dataset_other - def update(self, data): + def update(self, data: dict): self.language = data["language"] self.managing_organization_name = data["managing_organization_name"] self.managing_organization_ror_id = data["managing_organization_ror_id"] diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index 321540b0..612e80df 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -27,5 +27,5 @@ def from_data(dataset, data: dict): dataset_readme.update(data) return dataset_readme - def update(self, data): + def update(self, data: dict): self.content = data["content"] diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 10063033..7a2003e5 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -29,6 +29,6 @@ def from_data(dataset, data: dict): 
dataset_record_keys.update(data) return dataset_record_keys - def update(self, data): + def update(self, data: dict): self.key_type = data["key_type"] self.key_details = data["key_details"] diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index 1975a761..1bc311e9 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -41,6 +41,6 @@ def from_data(dataset, data: dict): dataset_related_item.update(data) return dataset_related_item - def update(self, data): + def update(self, data: dict): self.type = data["type"] self.relation_type = data["relation_type"] diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index 5340d5f0..1b145652 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -37,7 +37,7 @@ def from_data(dataset, data: dict): dataset_related_contributor.update(data) return dataset_related_contributor - def update(self, data): + def update(self, data: dict): self.name = data["name"] self.name_type = data["name_type"] self.creator = data["creator"] diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py index bc7071ab..ca752f65 100644 --- a/model/dataset_metadata/dataset_related_item_other.py +++ b/model/dataset_metadata/dataset_related_item_other.py @@ -48,7 +48,7 @@ def from_data(dataset, data: dict): dataset_related_item_other.update(data) return dataset_related_item_other - def update(self, data): + def update(self, data: dict): self.publication_year = data["publication_year"] self.volume = data["volume"] self.issue = data["issue"] diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 642cd771..197a715e 100644 --- 
a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -33,6 +33,6 @@ def from_data(dataset, data: dict): dataset_related_item_title.update(data) return dataset_related_item_title - def update(self, data): + def update(self, data: dict): self.type = data["type"] self.title = data["title"] diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index ec46da69..1624594f 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -34,7 +34,7 @@ def from_data(dataset, data: dict): dataset_rights.update(data) return dataset_rights - def update(self, data): + def update(self, data: dict): self.rights = data["rights"] self.uri = data["uri"] self.identifier = data["identifier"] diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 39b0de75..2bfe2216 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -36,7 +36,7 @@ def from_data(dataset, data: dict): dataset_subject.update(data) return dataset_subject - def update(self, data): + def update(self, data: dict): self.subject = data["subject"] self.scheme = data["scheme"] self.scheme_uri = data["scheme_uri"] diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index ca413f46..9f02ec91 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -30,6 +30,6 @@ def from_data(dataset, data: dict): return dataset_title - def update(self, data): + def update(self, data: dict): self.title = data["title"] self.type = data["type"] diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 488e09cb..9029eb44 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -2,6 +2,7 @@ import uuid from datetime import timezone +import model from 
sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -42,13 +43,13 @@ def to_dict(self): } @staticmethod - def from_data(study, data): + def from_data(study: model.Study, data: dict): """Creates a new study from a dictionary""" study_arm = StudyArm(study) study_arm.update(data) return study_arm - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.label = data["label"] self.type = data["type"] diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 2ba837dd..7b1214b8 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -2,6 +2,7 @@ import uuid from datetime import timezone +import model from ..db import db @@ -39,13 +40,13 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: model.StudyArm, data: dict): """Creates a new study metadata from a dictionary""" study_available = StudyAvailableIpd(study) study_available.update(data) return study_available - def update(self, data): + def update(self, data: dict): """Updates the study metadata from a dictionary""" self.identifier = data["identifier"] self.type = data["type"] diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 1ec50c44..adebbc5f 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -3,12 +3,12 @@ from datetime import timezone from ..db import db - +from model import Study class StudyContact(db.Model): """A study is a collection of datasets and participants""" - def __init__(self, study, role, central_contact): + def __init__(self, study: Study, role, central_contact): self.id = str(uuid.uuid4()) self.study = study self.role = role @@ -47,14 +47,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict, role, central_contact): + def from_data(study: Study, data: dict, role, 
central_contact): """Creates a new study from a dictionary""" study_contact = StudyContact(study, role, central_contact) study_contact.update(data) return study_contact - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.name = data["name"] self.affiliation = data["affiliation"] diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index b5df990d..4b13979d 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -1,12 +1,13 @@ import uuid from ..db import db +from model import Study class StudyDescription(db.Model): """A study is a collection of datasets and participants""" - def __init__(self, study): + def __init__(self, study: Study): self.id = str(uuid.uuid4()) self.study = study self.brief_summary = "" @@ -32,14 +33,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_description = StudyDescription(study) study_description.update(data) return study_description - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.brief_summary = data["brief_summary"] self.detailed_description = data["detailed_description"] diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 944d6662..280b372d 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -4,12 +4,13 @@ from sqlalchemy.dialects.postgresql import ARRAY from ..db import db +from model import Study class StudyDesign(db.Model): """A study is a collection of datasets and participants""" - def __init__(self, study): + def __init__(self, study: Study): self.id = str(uuid.uuid4()) self.study = study @@ -84,14 +85,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): 
"""Creates a new study from a dictionary""" study_design = StudyDesign(study) study_design.update(data) return study_design - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.design_allocation = data["design_allocation"] self.study_type = data["study_type"] diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 65ccad3b..5d488add 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -4,12 +4,13 @@ from sqlalchemy.dialects.postgresql import ARRAY from ..db import db +from model import Study class StudyEligibility(db.Model): """A study is a collection of datasets and participants""" - def __init__(self, study): + def __init__(self, study: Study): self.id = str(uuid.uuid4()) self.study = study self.gender = "" @@ -68,14 +69,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_eligibility = StudyEligibility(study) study_eligibility.update(data) return study_eligibility - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.gender = data["gender"] self.gender_based = data["gender_based"] diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 7341c8cc..c7c9e5e0 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -3,6 +3,7 @@ from datetime import timezone from ..db import db +from model import Study class StudyIdentification(db.Model): @@ -37,14 +38,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict, secondary): + def from_data(study: Study, data: dict, secondary): """Creates a new study from a dictionary""" study_identification = StudyIdentification(study, secondary) study_identification.update(data) return 
study_identification - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.identifier = data["identifier"] self.identifier_type = data["identifier_type"] diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 9e0e37eb..4428d8d8 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -6,6 +6,7 @@ from sqlalchemy.dialects.postgresql import ARRAY from ..db import db +from model import Study class StudyIntervention(db.Model): @@ -44,14 +45,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_intervention = StudyIntervention(study) study_intervention.update(data) return study_intervention - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.type = data["type"] self.name = data["name"] diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index d091781c..86be8376 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -4,6 +4,7 @@ from sqlalchemy.dialects.postgresql import ARRAY from ..db import db +from model import Study class StudyIpdsharing(db.Model): @@ -47,14 +48,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_ipdsharing = StudyIpdsharing(study) study_ipdsharing.update(data) return study_ipdsharing - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.ipd_sharing = data["ipd_sharing"] self.ipd_sharing_description = data["ipd_sharing_description"] diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 53c23a06..e40f08b2 100644 --- 
a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -3,6 +3,7 @@ from datetime import timezone from ..db import db +from model import Study class StudyLink(db.Model): @@ -35,14 +36,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_link = StudyLink(study) study_link.update(data) return study_link - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.url = data["url"] self.title = data["title"] diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index d5ba2d86..f47cfd3f 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -3,6 +3,7 @@ from datetime import timezone from ..db import db +from model import Study class StudyLocation(db.Model): @@ -43,14 +44,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_location = StudyLocation(study) study_location.update(data) return study_location - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.facility = data["facility"] self.status = data["status"] diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index bb1dced0..78e729a1 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -4,6 +4,7 @@ from sqlalchemy.dialects.postgresql import ARRAY from ..db import db +from model import Study class StudyOther(db.Model): @@ -41,14 +42,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_other = StudyOther(study) study_other.update(data) return study_other - def update(self, data): + def 
update(self, data: dict): """Updates the study from a dictionary""" self.oversight_has_dmc = data["oversight_has_dmc"] self.conditions = data["conditions"] diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index a4d90ebe..8a725629 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -3,6 +3,7 @@ from datetime import timezone from ..db import db +from model import Study class StudyOverallOfficial(db.Model): @@ -37,14 +38,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_overall_official = StudyOverallOfficial(study) study_overall_official.update(data) return study_overall_official - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.name = data["name"] self.affiliation = data["affiliation"] diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 1b6ecfb1..8b5f22da 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -3,6 +3,7 @@ from datetime import timezone from ..db import db +from model import Study class StudyReference(db.Model): @@ -37,14 +38,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_reference = StudyReference(study) study_reference.update(data) return study_reference - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.identifier = data["identifier"] self.type = data["type"] diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 08292fa9..65b7aa6d 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ 
b/model/study_metadata/study_sponsors_collaborators.py @@ -4,6 +4,7 @@ from sqlalchemy.dialects.postgresql import ARRAY from ..db import db +from model import Study class StudySponsorsCollaborators(db.Model): @@ -46,14 +47,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_sponsors_collaborators = StudySponsorsCollaborators(study) study_sponsors_collaborators.update(data) return study_sponsors_collaborators - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.responsible_party_type = data["responsible_party_type"] @@ -69,14 +70,14 @@ def update(self, data): self.lead_sponsor_name = data["lead_sponsor_name"] @staticmethod - def from_data_(study, data: dict): + def from_data_(study: Study, data: dict): """Creates a new study from a dictionary""" study_sponsors_collaborators = StudySponsorsCollaborators(study) study_sponsors_collaborators.update(data) return study_sponsors_collaborators - def update_collaborators(self, data): + def update_collaborators(self, data: dict): """Updates the study from a dictionary""" self.collaborator_name = data["collaborator_name"] self.study.touch() diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index e1973dac..2d346f1b 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -1,6 +1,7 @@ import uuid from ..db import db +from model import Study class StudyStatus(db.Model): @@ -44,14 +45,14 @@ def to_dict(self): } @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" study_status = StudyStatus(study) study_status.update(data) return study_status - def update(self, data): + def update(self, data: dict): """Updates the study from a dictionary""" self.overall_status = data["overall_status"] self.why_stopped = 
data["why_stopped"] From 5e95e5e4b73f322818a87a4a787337c4a9199f3a Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 09:34:25 -0700 Subject: [PATCH 273/505] fix: update arg types in models/functions --- apis/contributor.py | 2 - model/__init__.py | 55 +++++++++---------- model/dataset.py | 10 ++-- model/dataset_metadata/dataset_access.py | 7 +-- .../dataset_related_item_title.py | 2 +- model/invited_study_contributor.py | 4 +- model/participant.py | 2 +- model/study.py | 2 +- model/study_contributor.py | 6 +- model/study_metadata/study_arm.py | 3 +- model/study_metadata/study_available_ipd.py | 1 + model/study_metadata/study_contact.py | 4 +- model/study_metadata/study_description.py | 3 +- model/study_metadata/study_design.py | 3 +- model/study_metadata/study_eligibility.py | 3 +- model/study_metadata/study_identification.py | 3 +- model/study_metadata/study_intervention.py | 3 +- model/study_metadata/study_ipdsharing.py | 3 +- model/study_metadata/study_link.py | 3 +- model/study_metadata/study_location.py | 3 +- model/study_metadata/study_other.py | 3 +- .../study_metadata/study_overall_official.py | 3 +- model/study_metadata/study_reference.py | 3 +- .../study_sponsors_collaborators.py | 3 +- model/study_metadata/study_status.py | 3 +- model/token_blacklist.py | 2 +- model/user.py | 4 +- model/user_details.py | 6 +- model/version.py | 2 +- tests/conftest.py | 8 +-- 30 files changed, 86 insertions(+), 73 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 1b26b1b7..c66f2ce7 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -82,11 +82,9 @@ def put(self, study_id: int, user_id: int): grantee = model.StudyContributor.query.filter( model.StudyContributor.user == user, model.StudyContributor.study == study ).first() - granter = model.StudyContributor.query.filter( model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() - # Order should go from the least privileged to the most privileged 
grants = OrderedDict() grants["viewer"] = [] diff --git a/model/__init__.py b/model/__init__.py index 0bbf9903..83ec4d68 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,20 +1,17 @@ -from .version import Version -from .dataset_versions import DatasetVersions +from model.dataset_metadata.dataset_related_item import DatasetRelatedItem +from model.dataset_metadata.dataset_related_item_contributor import ( + DatasetRelatedItemContributor, +) +from model.dataset_metadata.dataset_related_item_identifier import ( + DatasetRelatedItemIdentifier, +) +from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther +from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle -from .db import db -from .participant import Participant -from .study import Study, StudyException -from .user import User from .dataset import Dataset - -from .email_verification import EmailVerification -from .token_blacklist import TokenBlacklist -from .user_details import UserDetails from .dataset_contributor import DatasetContributor -from .invited_study_contributor import StudyInvitedContributor -from .study_contributor import StudyContributor - from .dataset_metadata.dataset_access import DatasetAccess +from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_consent import DatasetConsent from .dataset_metadata.dataset_contributor_affiliation import ( DatasetContributorAffiliation, @@ -23,25 +20,22 @@ from .dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder -from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_managing_organization import DatasetManagingOrganization from .dataset_metadata.dataset_other import DatasetOther from .dataset_metadata.dataset_readme import 
DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights -from .dataset_metadata.dataset_title import DatasetTitle from .dataset_metadata.dataset_subject import DatasetSubject - -from model.dataset_metadata.dataset_related_item_contributor import ( - DatasetRelatedItemContributor, -) -from model.dataset_metadata.dataset_related_item_identifier import ( - DatasetRelatedItemIdentifier, -) -from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther -from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle -from model.dataset_metadata.dataset_related_item import DatasetRelatedItem - +from .dataset_metadata.dataset_title import DatasetTitle +from .dataset_versions import DatasetVersions +from .db import db +from .email_verification import EmailVerification +from .invited_study_contributor import StudyInvitedContributor +from .participant import Participant +from .study import Study, StudyException +from .study_contributor import StudyContributor +from .study_metadata.arm import Arm +from .study_metadata.identifiers import Identifiers from .study_metadata.study_arm import StudyArm from .study_metadata.study_available_ipd import StudyAvailableIpd from .study_metadata.study_contact import StudyContact @@ -58,9 +52,10 @@ from .study_metadata.study_reference import StudyReference from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from .study_metadata.study_status import StudyStatus -from .study_metadata.identifiers import Identifiers -from .study_metadata.arm import Arm - +from .token_blacklist import TokenBlacklist +from .user import User +from .user_details import UserDetails +from .version import Version __all__ = [ "Study", @@ -116,4 +111,4 @@ "EmailVerification", "TokenBlacklist", "UserDetails", -] \ No newline at end of file +] diff --git a/model/dataset.py b/model/dataset.py index 
bbf37f11..4a925d15 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -7,6 +7,7 @@ import model from .db import db +from .study import Study class Dataset(db.Model): @@ -107,16 +108,13 @@ def last_published(self): def last_modified(self): return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() - - @staticmethod - def from_data(study, data: dict): + def from_data(study: Study, data: dict): dataset_obj = Dataset(study) - dataset_obj.update(data) + dataset_obj.update() return dataset_obj - def update(self, data: dict): + def update(self): """Creates a new dataset from a dictionary""" self.updated_on = datetime.datetime.now(timezone.utc).timestamp() # self.dataset_versions = data["dataset_versions"] - diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index cbd2505c..10e2a712 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -1,8 +1,10 @@ import uuid -from ..db import db from model import Dataset +from ..db import db + + class DatasetAccess(db.Model): def __init__(self, dataset): self.id = str(uuid.uuid4()) @@ -27,7 +29,6 @@ def to_dict(self): "url_last_checked": self.url_last_checked, } - @staticmethod def from_data(dataset: Dataset, data: dict): dataset_access = DatasetAccess(dataset) @@ -39,5 +40,3 @@ def update(self, data: dict): self.url = data["url"] self.url_last_checked = data["url_last_checked"] self.type = data["type"] - - diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 197a715e..54c01726 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -29,7 +29,7 @@ def to_dict(self): @staticmethod def from_data(dataset, data: dict): - dataset_related_item_title = DatasetRelatedItemTitle() + dataset_related_item_title = DatasetRelatedItemTitle(dataset) dataset_related_item_title.update(data) 
return dataset_related_item_title diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 1f13b71f..4343f2c1 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,13 +1,15 @@ import datetime import uuid +import model + from .db import db # from datetime import datetime, timezone class StudyInvitedContributor(db.Model): - def __init__(self, study, email_address, permission): + def __init__(self, study: model.Study, email_address: str, permission): self.id = str(uuid.uuid4()) self.study = study self.permission = permission diff --git a/model/participant.py b/model/participant.py index f1217dea..1adba451 100644 --- a/model/participant.py +++ b/model/participant.py @@ -49,7 +49,7 @@ def from_data(data: dict, study): participant.update(data) return participant - def update(self, data): + def update(self, data: dict): self.first_name = data["first_name"] self.last_name = data["last_name"] self.address = data["address"] diff --git a/model/study.py b/model/study.py index 90eee078..5f1a2942 100644 --- a/model/study.py +++ b/model/study.py @@ -151,6 +151,7 @@ def __init__(self): ) def to_dict(self): + """Converts the study to a dictionary""" owner = self.study_contributors.filter( model.StudyContributor.permission == "owner" ).first() @@ -158,7 +159,6 @@ def to_dict(self): model.StudyContributor.user_id == g.user.id ).first() print(contributor_permission) - """Converts the study to a dictionary""" return { "id": self.id, "title": self.title, diff --git a/model/study_contributor.py b/model/study_contributor.py index 4d02d4aa..913a6aec 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,12 +1,14 @@ import datetime +import model + from .db import db # from datetime import datetime, timezone class StudyContributor(db.Model): - def __init__(self, study, user, permission): + def __init__(self, study: model.Study, user: model.User, permission): self.study = study self.user = user 
self.permission = permission @@ -38,7 +40,7 @@ def to_dict(self): } @staticmethod - def from_data(study, user, permission): + def from_data(study: model.Study, user: model.User, permission): contributor = StudyContributor(study, user, permission) return contributor diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 9029eb44..85b036f0 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -2,10 +2,11 @@ import uuid from datetime import timezone -import model from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY +import model + from ..db import db diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 7b1214b8..ff8da864 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -3,6 +3,7 @@ from datetime import timezone import model + from ..db import db diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index adebbc5f..45db158a 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -2,9 +2,11 @@ import uuid from datetime import timezone -from ..db import db from model import Study +from ..db import db + + class StudyContact(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index 4b13979d..02bbdbe9 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -1,8 +1,9 @@ import uuid -from ..db import db from model import Study +from ..db import db + class StudyDescription(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 280b372d..dc545a05 100644 --- a/model/study_metadata/study_design.py +++ 
b/model/study_metadata/study_design.py @@ -3,9 +3,10 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY -from ..db import db from model import Study +from ..db import db + class StudyDesign(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 5d488add..f64ee356 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -3,9 +3,10 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY -from ..db import db from model import Study +from ..db import db + class StudyEligibility(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index c7c9e5e0..53447f43 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -2,9 +2,10 @@ import uuid from datetime import timezone -from ..db import db from model import Study +from ..db import db + class StudyIdentification(db.Model): def __init__(self, study, secondary): diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 4428d8d8..97c42360 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -5,9 +5,10 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY -from ..db import db from model import Study +from ..db import db + class StudyIntervention(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 86be8376..773e76a2 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -3,9 +3,10 @@ from sqlalchemy import String from 
sqlalchemy.dialects.postgresql import ARRAY -from ..db import db from model import Study +from ..db import db + class StudyIpdsharing(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index e40f08b2..041396b6 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -2,9 +2,10 @@ import uuid from datetime import timezone -from ..db import db from model import Study +from ..db import db + class StudyLink(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index f47cfd3f..0b38e23e 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -2,9 +2,10 @@ import uuid from datetime import timezone -from ..db import db from model import Study +from ..db import db + class StudyLocation(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 78e729a1..91721cda 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -3,9 +3,10 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY -from ..db import db from model import Study +from ..db import db + class StudyOther(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 8a725629..32a7eb5d 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -2,9 +2,10 @@ import uuid from datetime import timezone -from ..db import db from model import Study +from ..db import db + class StudyOverallOfficial(db.Model): """A study is a collection of datasets and participants""" diff --git 
a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 8b5f22da..aff4af6f 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -2,9 +2,10 @@ import uuid from datetime import timezone -from ..db import db from model import Study +from ..db import db + class StudyReference(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 65b7aa6d..873de495 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -3,9 +3,10 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY -from ..db import db from model import Study +from ..db import db + class StudySponsorsCollaborators(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 2d346f1b..8e586f34 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -1,8 +1,9 @@ import uuid -from ..db import db from model import Study +from ..db import db + class StudyStatus(db.Model): """A study is a collection of datasets and participants""" diff --git a/model/token_blacklist.py b/model/token_blacklist.py index 12475bd3..5aaaeb16 100644 --- a/model/token_blacklist.py +++ b/model/token_blacklist.py @@ -18,6 +18,6 @@ def from_data(data: dict): token_blacklist.update(data) return token_blacklist - def update(self, data): + def update(self, data: dict): self.jti = data["jti"] self.exp = data["exp"] diff --git a/model/user.py b/model/user.py index 2f9fe062..d11736c2 100644 --- a/model/user.py +++ b/model/user.py @@ -53,12 +53,12 @@ def update(self, data): # self.hash = data["hash"] # self.created_at = data["created_at"] - def set_password(self, password, data): + def 
set_password(self, password: str, data): """setting bcrypt passwords""" hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") self.hash = hashed_password - def check_password(self, password): + def check_password(self, password: str): """validates password and bcrypt hashed password""" # TODO check password length and make uppercase letter app.bcrypt.generate_password_hash(password).decode("utf-8") diff --git a/model/user_details.py b/model/user_details.py index d2ffebdb..8a45661e 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -1,5 +1,7 @@ import uuid +import model + from .db import db @@ -41,12 +43,12 @@ def to_dict(self): } @staticmethod - def from_data(user, data: dict): + def from_data(user: model.User, data: dict): user = UserDetails(user) user.update(data) return user - def update(self, data): + def update(self, data: dict): self.first_name = data["first_name"] self.last_name = data["last_name"] self.institution = data["institution"] diff --git a/model/version.py b/model/version.py index 5daf5227..005711a4 100644 --- a/model/version.py +++ b/model/version.py @@ -53,7 +53,7 @@ def from_data(dataset: Dataset, data: dict): dataset_version_obj.update(data) return dataset_version_obj - def update(self, data): + def update(self, data: dict): self.title = data["title"] self.published = data["published"] self.doi = data["doi"] diff --git a/tests/conftest.py b/tests/conftest.py index 36f022d0..e760ed8e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,11 +7,11 @@ @pytest.fixture() def app(): """An application for the tests.""" - config = { - "TESTING": True, - } + # config = { + # "TESTING": True, + # } - flask_app = create_app(config) + flask_app = create_app() flask_app.config.update( { From 06f161fe449f48ba73143d75670a4b99e3708a69 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 09:53:00 -0700 Subject: [PATCH 274/505] fix: flake8 --- apis/authentication.py | 3 +-- apis/dataset.py | 5 +++-- 
model/study_metadata/identifiers.py | 3 +-- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 8f313ce4..9fe05a64 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -86,8 +86,7 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=200), + "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=200), "jti": str(uuid.uuid4()), }, config.secret, diff --git a/apis/dataset.py b/apis/dataset.py index fb4980ad..0b74f0eb 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -84,8 +84,9 @@ def put(self, study_id, dataset_id): @api.response(201, "Success") @api.response(400, "Validation Error") - def delete(self, study_id, dataset_id): - study = model.Study.query.get(study_id) + def delete(self, _study_id, dataset_id): + + study = model.Study.query.get(_study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 data_obj = model.Dataset.query.get(dataset_id) diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index fa5d4cd9..4b66db50 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -24,8 +24,7 @@ def to_dict(self): for identifier in sorted_study_identifications if not identifier.secondary ] - ) - != 0 + ) != 0 else [], "secondary": [ identifier.to_dict() From 7c9780b9b196cf4fe8d96481054032671887dfd7 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 12 Oct 2023 16:54:06 +0000 Subject: [PATCH 275/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 3 ++- apis/dataset.py | 1 - model/study_metadata/identifiers.py | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/apis/authentication.py 
b/apis/authentication.py index 9fe05a64..8f313ce4 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -86,7 +86,8 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) + datetime.timedelta(minutes=200), + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=200), "jti": str(uuid.uuid4()), }, config.secret, diff --git a/apis/dataset.py b/apis/dataset.py index 0b74f0eb..d7516321 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -85,7 +85,6 @@ def put(self, study_id, dataset_id): @api.response(201, "Success") @api.response(400, "Validation Error") def delete(self, _study_id, dataset_id): - study = model.Study.query.get(_study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 4b66db50..fa5d4cd9 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -24,7 +24,8 @@ def to_dict(self): for identifier in sorted_study_identifications if not identifier.secondary ] - ) != 0 + ) + != 0 else [], "secondary": [ identifier.to_dict() From e1c517cf6b820fdf92d4e15baa1772779bb8c29f Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 10:47:26 -0700 Subject: [PATCH 276/505] fix: typecheck errors --- apis/authentication.py | 5 +++-- apis/dataset.py | 17 ++++++++--------- apis/participant.py | 4 ++-- apis/study_metadata/study_identification.py | 2 +- model/dataset.py | 2 +- model/dataset_contributor.py | 2 +- model/dataset_metadata/dataset_access.py | 2 +- .../dataset_alternate_identifier.py | 2 +- model/dataset_metadata/dataset_consent.py | 2 +- .../dataset_contributor_affiliation.py | 2 +- model/dataset_metadata/dataset_date.py | 2 +- .../dataset_metadata/dataset_de_ident_level.py | 2 +- model/dataset_metadata/dataset_description.py | 2 +- model/dataset_metadata/dataset_funder.py | 2 +- 
.../dataset_managing_organization.py | 2 +- model/dataset_metadata/dataset_other.py | 2 +- model/dataset_metadata/dataset_readme.py | 2 +- model/dataset_metadata/dataset_record_keys.py | 2 +- model/dataset_metadata/dataset_related_item.py | 2 +- .../dataset_related_item_contributor.py | 2 +- .../dataset_related_item_identifier.py | 2 +- .../dataset_related_item_other.py | 2 +- .../dataset_related_item_title.py | 2 +- model/dataset_metadata/dataset_rights.py | 2 +- model/dataset_metadata/dataset_subject.py | 2 +- model/dataset_metadata/dataset_title.py | 2 +- model/email_verification.py | 2 +- model/invited_study_contributor.py | 2 +- model/participant.py | 2 +- model/study.py | 2 +- model/study_contributor.py | 2 +- model/study_metadata/identifiers.py | 3 ++- model/study_metadata/study_arm.py | 4 ++-- model/study_metadata/study_available_ipd.py | 4 ++-- model/study_metadata/study_contact.py | 4 ++-- model/study_metadata/study_description.py | 4 ++-- model/study_metadata/study_design.py | 4 ++-- model/study_metadata/study_eligibility.py | 4 ++-- model/study_metadata/study_identification.py | 5 +++-- model/study_metadata/study_intervention.py | 4 ++-- model/study_metadata/study_ipdsharing.py | 4 ++-- model/study_metadata/study_link.py | 4 ++-- model/study_metadata/study_location.py | 4 ++-- model/study_metadata/study_other.py | 4 ++-- model/study_metadata/study_overall_official.py | 4 ++-- model/study_metadata/study_reference.py | 4 ++-- .../study_sponsors_collaborators.py | 4 ++-- model/study_metadata/study_status.py | 4 ++-- model/token_blacklist.py | 2 +- model/user.py | 2 +- model/user_details.py | 2 +- model/version.py | 2 +- 52 files changed, 80 insertions(+), 78 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 9fe05a64..a1deae2f 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -86,9 +86,10 @@ def post(self): encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) + 
datetime.timedelta(minutes=200), + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=200), # noqa: W503 "jti": str(uuid.uuid4()), - }, + }, # noqa: W503 config.secret, algorithm="HS256", ) diff --git a/apis/dataset.py b/apis/dataset.py index 0b74f0eb..99444ee2 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -66,13 +66,13 @@ def post(self, study_id): class DatasetResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - def get(self, study_id, dataset_id): + def get(self, study_id: int, dataset_id: int): data_obj = model.Dataset.query.get(dataset_id) return data_obj.to_dict() @api.response(201, "Success") @api.response(400, "Validation Error") - def put(self, study_id, dataset_id): + def put(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("update_dataset", study): return "Access denied, you can not modify", 403 @@ -84,9 +84,8 @@ def put(self, study_id, dataset_id): @api.response(201, "Success") @api.response(400, "Validation Error") - def delete(self, _study_id, dataset_id): - - study = model.Study.query.get(_study_id) + def delete(self, study_id: int, dataset_id: int): + study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 data_obj = model.Dataset.query.get(dataset_id) @@ -97,7 +96,7 @@ def delete(self, _study_id, dataset_id): dataset_ = study.dataset return [d.to_dict() for d in dataset_], 201 - # def delete(self, study_id, dataset_id, version_id): + # def delete(self, study_id: int, dataset_id: int, version_id: int): # data_obj = Dataset.query.get(dataset_id) # for version in data_obj.dataset_versions: # db.session.delete(version) @@ -113,11 +112,11 @@ class VersionResource(Resource): @api.response(400, "Validation Error") @api.doc("dataset version") @api.marshal_with(dataset_versions_model) - def get(self, study_id, dataset_id, version_id): + def get(self, study_id: int, 
dataset_id: int, version_id: int): dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict() - def put(self, study_id, dataset_id, version_id): + def put(self, study_id: int, dataset_id: int, version_id: int): study = model.Study.query.get(study_id) if not is_granted("publish_dataset", study): return "Access denied, you can not modify", 403 @@ -126,7 +125,7 @@ def put(self, study_id, dataset_id, version_id): model.db.session.commit() return jsonify(data_version_obj.to_dict()), 201 - def delete(self, study_id, dataset_id, version_id): + def delete(self, study_id: int, dataset_id: int, version_id: int): study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 diff --git a/apis/participant.py b/apis/participant.py index e55cf730..015c7c05 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -50,7 +50,7 @@ class UpdateParticipant(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(participant_model) - def put(self, study_id, participant_id: int): + def put(self, study_id: int, participant_id: int): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 is_granted("viewer", study_id) @@ -61,7 +61,7 @@ def put(self, study_id, participant_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") - def delete(self, study_id, participant_id: int): + def delete(self, study_id: int, participant_id: int): if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 is_granted("viewer", study_id) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 37e5d86a..45fd17fd 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -46,7 +46,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not 
is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - primary = data["primary"] + primary: dict = data["primary"] primary["secondary"] = False if "id" in primary and primary["id"]: diff --git a/model/dataset.py b/model/dataset.py index 4a925d15..7090d7ac 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -10,7 +10,7 @@ from .study import Study -class Dataset(db.Model): +class Dataset(db.Model): # type: ignore def __init__(self, study): self.study = study self.id = str(uuid.uuid4()) diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py index 844d8537..a84bba52 100644 --- a/model/dataset_contributor.py +++ b/model/dataset_contributor.py @@ -3,7 +3,7 @@ from .db import db -class DatasetContributor(db.Model): +class DatasetContributor(db.Model): # type: ignore def __init__(self): self.id = str(uuid.uuid4()) diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 10e2a712..36f29310 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -5,7 +5,7 @@ from ..db import db -class DatasetAccess(db.Model): +class DatasetAccess(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index 368ecfe2..b8069d41 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetAlternateIdentifier(db.Model): +class DatasetAlternateIdentifier(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 8fc7ac45..69d1e5ce 100644 --- a/model/dataset_metadata/dataset_consent.py +++ 
b/model/dataset_metadata/dataset_consent.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetConsent(db.Model): +class DatasetConsent(db.Model): # type: ignore def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) diff --git a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index 0362adcc..dec9a7c6 100644 --- a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetContributorAffiliation(db.Model): +class DatasetContributorAffiliation(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index e3716cdf..d8f9a3f3 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetDate(db.Model): +class DatasetDate(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index a83885b2..1f63d2bc 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetDeIdentLevel(db.Model): +class DatasetDeIdentLevel(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index c927ce62..e07504ec 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetDescription(db.Model): +class DatasetDescription(db.Model): # type: ignore def 
__init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index b0ed2907..b4c96a4c 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetFunder(db.Model): +class DatasetFunder(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py index f46532a8..76548f26 100644 --- a/model/dataset_metadata/dataset_managing_organization.py +++ b/model/dataset_metadata/dataset_managing_organization.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetManagingOrganization(db.Model): +class DatasetManagingOrganization(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 3dd99ddf..baafeba5 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -6,7 +6,7 @@ from ..db import db -class DatasetOther(db.Model): +class DatasetOther(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index 612e80df..889d73d2 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetReadme(db.Model): +class DatasetReadme(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 7a2003e5..0b48818f 100644 --- 
a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetRecordKeys(db.Model): +class DatasetRecordKeys(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index 1bc311e9..477c1ab5 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetRelatedItem(db.Model): +class DatasetRelatedItem(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index 1b145652..db3c3ae6 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetRelatedItemContributor(db.Model): +class DatasetRelatedItemContributor(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index f428f7ba..6a854900 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetRelatedItemIdentifier(db.Model): +class DatasetRelatedItemIdentifier(db.Model): # type: ignore def __init__(self): self.id = str(uuid.uuid4()) diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py index ca752f65..aba272d3 100644 --- a/model/dataset_metadata/dataset_related_item_other.py +++ 
b/model/dataset_metadata/dataset_related_item_other.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetRelatedItemOther(db.Model): +class DatasetRelatedItemOther(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 54c01726..a2355673 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetRelatedItemTitle(db.Model): +class DatasetRelatedItemTitle(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 1624594f..2b494c6d 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetRights(db.Model): +class DatasetRights(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 2bfe2216..12d7d24d 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetSubject(db.Model): +class DatasetSubject(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index 9f02ec91..b120cbcc 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -3,7 +3,7 @@ from ..db import db -class DatasetTitle(db.Model): +class DatasetTitle(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset 
= dataset diff --git a/model/email_verification.py b/model/email_verification.py index 81cb3ca8..8961b8f2 100644 --- a/model/email_verification.py +++ b/model/email_verification.py @@ -4,7 +4,7 @@ from .db import db -class EmailVerification(db.Model): +class EmailVerification(db.Model): # type: ignore def __init__(self): self.created_at = datetime.datetime.now(timezone.utc).timestamp() diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 4343f2c1..c310bae2 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -8,7 +8,7 @@ # from datetime import datetime, timezone -class StudyInvitedContributor(db.Model): +class StudyInvitedContributor(db.Model): # type: ignore def __init__(self, study: model.Study, email_address: str, permission): self.id = str(uuid.uuid4()) self.study = study diff --git a/model/participant.py b/model/participant.py index 1adba451..0c71b270 100644 --- a/model/participant.py +++ b/model/participant.py @@ -7,7 +7,7 @@ from .db import db -class Participant(db.Model): +class Participant(db.Model): # type: ignore def __init__(self, study): self.study = study self.id = str(uuid.uuid4()) diff --git a/model/study.py b/model/study.py index 5f1a2942..0678e161 100644 --- a/model/study.py +++ b/model/study.py @@ -15,7 +15,7 @@ class StudyException(Exception): pass -class Study(db.Model): +class Study(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self): diff --git a/model/study_contributor.py b/model/study_contributor.py index 913a6aec..2ace9e81 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -7,7 +7,7 @@ # from datetime import datetime, timezone -class StudyContributor(db.Model): +class StudyContributor(db.Model): # type: ignore def __init__(self, study: model.Study, user: model.User, permission): self.study = study self.user = user diff --git a/model/study_metadata/identifiers.py 
b/model/study_metadata/identifiers.py index 4b66db50..18b9e6b5 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -24,7 +24,8 @@ def to_dict(self): for identifier in sorted_study_identifications if not identifier.secondary ] - ) != 0 + ) + != 0 # noqa: W503 else [], "secondary": [ identifier.to_dict() diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 85b036f0..01a9eed6 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -10,7 +10,7 @@ from ..db import db -class StudyArm(db.Model): +class StudyArm(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -60,5 +60,5 @@ def update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index ff8da864..2a6e8637 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -7,7 +7,7 @@ from ..db import db -class StudyAvailableIpd(db.Model): +class StudyAvailableIpd(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -57,5 +57,5 @@ def update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 45db158a..4f943e6f 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -7,7 +7,7 @@ from ..db import db -class StudyContact(db.Model): +class StudyContact(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study: Study, role, central_contact): @@ -68,5 +68,5 @@ def update(self, data: dict): def validate(self): 
"""Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index 02bbdbe9..a39f72e0 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -5,7 +5,7 @@ from ..db import db -class StudyDescription(db.Model): +class StudyDescription(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study: Study): @@ -49,5 +49,5 @@ def update(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index dc545a05..d09eb5e2 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -8,7 +8,7 @@ from ..db import db -class StudyDesign(db.Model): +class StudyDesign(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study: Study): @@ -119,5 +119,5 @@ def update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index f64ee356..886be8e8 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -8,7 +8,7 @@ from ..db import db -class StudyEligibility(db.Model): +class StudyEligibility(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study: Study): @@ -95,5 +95,5 @@ def update(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_identification.py 
b/model/study_metadata/study_identification.py index 53447f43..1ac03348 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -7,7 +7,8 @@ from ..db import db -class StudyIdentification(db.Model): +class StudyIdentification(db.Model): # type: ignore + def __init__(self, study, secondary): self.id = str(uuid.uuid4()) self.study = study @@ -56,5 +57,5 @@ def update(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 97c42360..7ca82b89 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -10,7 +10,7 @@ from ..db import db -class StudyIntervention(db.Model): +class StudyIntervention(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -64,5 +64,5 @@ def update(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 773e76a2..21d18ef4 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -8,7 +8,7 @@ from ..db import db -class StudyIpdsharing(db.Model): +class StudyIpdsharing(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -68,5 +68,5 @@ def update(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 041396b6..946704d2 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ 
-7,7 +7,7 @@ from ..db import db -class StudyLink(db.Model): +class StudyLink(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -52,5 +52,5 @@ def update(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 0b38e23e..808a1401 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -7,7 +7,7 @@ from ..db import db -class StudyLocation(db.Model): +class StudyLocation(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -64,5 +64,5 @@ def update(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 91721cda..4bd272aa 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -8,7 +8,7 @@ from ..db import db -class StudyOther(db.Model): +class StudyOther(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -60,5 +60,5 @@ def update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 32a7eb5d..0035fe2e 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -7,7 +7,7 @@ from ..db import db -class StudyOverallOfficial(db.Model): +class StudyOverallOfficial(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -55,5 +55,5 @@ def 
update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index aff4af6f..d64706c4 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -7,7 +7,7 @@ from ..db import db -class StudyReference(db.Model): +class StudyReference(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -55,5 +55,5 @@ def update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 873de495..1ca59401 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -8,7 +8,7 @@ from ..db import db -class StudySponsorsCollaborators(db.Model): +class StudySponsorsCollaborators(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -85,5 +85,5 @@ def update_collaborators(self, data: dict): def validate(self): """Validates the lead_sponsor_last_name study""" - violations = [] + violations: list = [] return violations diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 8e586f34..c2003bb7 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -5,7 +5,7 @@ from ..db import db -class StudyStatus(db.Model): +class StudyStatus(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -65,5 +65,5 @@ def update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] return violations diff --git a/model/token_blacklist.py 
b/model/token_blacklist.py index 5aaaeb16..8bb830e0 100644 --- a/model/token_blacklist.py +++ b/model/token_blacklist.py @@ -1,7 +1,7 @@ from .db import db -class TokenBlacklist(db.Model): +class TokenBlacklist(db.Model): # type: ignore __tablename__ = "token_blacklist" jti = db.Column(db.CHAR(36), primary_key=True) exp = db.Column(db.String, nullable=False) diff --git a/model/user.py b/model/user.py index d11736c2..19b53299 100644 --- a/model/user.py +++ b/model/user.py @@ -9,7 +9,7 @@ # from datetime import datetime, timezone -class User(db.Model): +class User(db.Model): # type: ignore def __init__(self, password, data): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() diff --git a/model/user_details.py b/model/user_details.py index 8a45661e..4a87c690 100644 --- a/model/user_details.py +++ b/model/user_details.py @@ -5,7 +5,7 @@ from .db import db -class UserDetails(db.Model): +class UserDetails(db.Model): # type: ignore def __init__(self, user): self.id = str(uuid.uuid4()) self.first_name = "" diff --git a/model/version.py b/model/version.py index 005711a4..c7dc0541 100644 --- a/model/version.py +++ b/model/version.py @@ -14,7 +14,7 @@ ) -class Version(db.Model): +class Version(db.Model): # type: ignore def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) From aef5f17745a8a6d8645162b4967d2fc1f71ad307 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 11:13:07 -0700 Subject: [PATCH 277/505] fix: typecheck errors for apis --- apis/authentication.py | 4 ++-- apis/contributor.py | 4 ++-- apis/study_metadata/study_arm.py | 6 ++++-- apis/study_metadata/study_available_ipd.py | 4 +++- apis/study_metadata/study_contact.py | 4 +++- apis/study_metadata/study_identification.py | 4 +++- apis/study_metadata/study_intervention.py | 4 +++- apis/study_metadata/study_link.py | 4 +++- apis/study_metadata/study_location.py | 4 +++- apis/study_metadata/study_other.py | 6 ++++-- 
apis/study_metadata/study_overall_official.py | 4 +++- apis/study_metadata/study_reference.py | 4 +++- apis/study_metadata/study_sponsors_collaborators.py | 4 +++- app.py | 2 +- model/dataset_versions.py | 7 +++---- model/invited_study_contributor.py | 5 ++--- model/study_contributor.py | 8 ++++---- model/version.py | 2 +- 18 files changed, 50 insertions(+), 30 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index a1deae2f..f6fd033f 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -41,7 +41,7 @@ class SignUpUser(Resource): @api.expect(signup_model) def post(self): """signs up the new users and saves data in DB""" - data = request.json + data: dict = request.json # TODO data[email doesnt exist then raise error; json validation library pattern = r"^[\w\.-]+@[\w\.-]+\.\w+$" if not data["email_address"] or not re.match(pattern, data["email_address"]): @@ -108,7 +108,7 @@ def authentication(): if "token" not in request.cookies: return - token = request.cookies.get("token") + token: str | bytes = request.cookies.get("token") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: diff --git a/apis/contributor.py b/apis/contributor.py index c66f2ce7..2b9aca39 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -42,7 +42,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify", 403 - data = request.json + data: dict = request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] @@ -86,7 +86,7 @@ def put(self, study_id: int, user_id: int): model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() # Order should go from the least privileged to the most privileged - grants = OrderedDict() + grants: OrderedDict[str, list | list[str]] = 
OrderedDict() grants["viewer"] = [] grants["editor"] = ["viewer"] grants["admin"] = ["viewer", "editor", "admin"] diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 568ee24e..ed747710 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,4 +1,6 @@ """API routes for study arm metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -44,10 +46,10 @@ def get(self, study_id): def post(self, study_id): """Create study arm metadata""" - study = model.Study.query.get(study_id) + study: model.Study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data = request.json + data: dict | typing.Any = request.json study_obj = model.Study.query.get(study_id) for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 98415a38..cb44df71 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,4 +1,6 @@ """API routes for study available ipd metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -48,7 +50,7 @@ def post(self, study_id: int): study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data = request.json + data: dict | typing.Any = request.json study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 65e91721..5683624a 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -1,4 +1,6 @@ """API routes for study contact metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -45,7 +47,7 @@ def post(self, study_id: int): study = 
model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data = request.json + data: dict | typing.Any = request.json study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 45fd17fd..05589303 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,4 +1,6 @@ """API routes for study identification metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -41,7 +43,7 @@ def get(self, study_id: int): @api.expect(study_identification) def post(self, study_id: int): """Create study identification metadata""" - data = request.json + data: dict | typing.Any = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 90b2248e..8eb08628 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -1,4 +1,6 @@ """API routes for study intervention metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -47,7 +49,7 @@ def post(self, study_id: int): if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 list_of_elements = [] - data = request.json + data: dict | typing.Any = request.json for i in data: if "id" in i and i["id"]: study_intervention_ = model.StudyIntervention.query.get(i["id"]) diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 0dd0399a..8d6f9cbb 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -1,4 +1,6 @@ """API routes for study link metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -38,7 
+40,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data = request.json + data: dict | typing.Any = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 074c0379..7148a02f 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -1,4 +1,6 @@ """API routes for study location metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -45,7 +47,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data = request.json + data: dict | typing.Any = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 46bdc901..3cd9754a 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -1,4 +1,6 @@ """API routes for study other metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -68,7 +70,7 @@ def put(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data = request.json + data: dict | typing.Any = request.json study_oversight = study_obj.study_other.oversight_has_dmc = data[ "oversight_has_dmc" ] @@ -97,7 +99,7 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study conditions metadata""" - data = request.json + data: dict | typing.Any = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 
403 diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index e083c8ef..e3e233da 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,4 +1,6 @@ """API routes for study overall official metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -45,7 +47,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def post(self, study_id: int): """Create study overall official metadata""" - data = request.json + data: dict | typing.Any = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 6f40abb7..c7a7b82b 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,4 +1,6 @@ """API routes for study reference metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -46,7 +48,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data = request.json + data: dict | typing.Any = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 46a9527c..f78fc650 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -1,4 +1,6 @@ """API routes for study sponsors and collaborators metadata""" +import typing + from flask import request from flask_restx import Resource, fields @@ -75,7 +77,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def put(self, study_id: 
int): """updating study collaborators""" - data = request.json + data: dict | typing.Any = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/app.py b/app.py index cf2831ac..aaf200f3 100644 --- a/app.py +++ b/app.py @@ -125,7 +125,7 @@ def on_after_request(resp): # print(request.cookies.get("token")) if "token" not in request.cookies: return resp - token = request.cookies.get("token") + token: str | bytes = request.cookies.get("token") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: diff --git a/model/dataset_versions.py b/model/dataset_versions.py index 6ee5e780..e0cf9577 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -1,11 +1,10 @@ -import model - +from .version import Version class DatasetVersions: def __init__( self, - last_published: model.Version, - last_modified: model.Version, + last_published: Version, + last_modified: Version, id: str, ): self.latest_version = last_modified.id diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index c310bae2..77a15de2 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,15 +1,14 @@ import datetime import uuid -import model - +from .study import Study from .db import db # from datetime import datetime, timezone class StudyInvitedContributor(db.Model): # type: ignore - def __init__(self, study: model.Study, email_address: str, permission): + def __init__(self, study: Study, email_address: str, permission): self.id = str(uuid.uuid4()) self.study = study self.permission = permission diff --git a/model/study_contributor.py b/model/study_contributor.py index 2ace9e81..1e651012 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,14 +1,14 @@ import datetime -import model - +from .user import User +from .study import Study from 
.db import db # from datetime import datetime, timezone class StudyContributor(db.Model): # type: ignore - def __init__(self, study: model.Study, user: model.User, permission): + def __init__(self, study: Study, user: User, permission): self.study = study self.user = user self.permission = permission @@ -40,7 +40,7 @@ def to_dict(self): } @staticmethod - def from_data(study: model.Study, user: model.User, permission): + def from_data(study: Study, user: User, permission): contributor = StudyContributor(study, user, permission) return contributor diff --git a/model/version.py b/model/version.py index c7dc0541..005711a4 100644 --- a/model/version.py +++ b/model/version.py @@ -14,7 +14,7 @@ ) -class Version(db.Model): # type: ignore +class Version(db.Model): def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) From 7428511636e84968a784dd396b6267df4bd072fd Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 11:55:40 -0700 Subject: [PATCH 278/505] fix: typecheck errors for apis dataset metadata --- apis/contributor.py | 9 +++++---- apis/dataset.py | 7 +++++-- apis/dataset_metadata/dataset_alternate_identifier.py | 4 +++- apis/dataset_metadata/dataset_description.py | 4 +++- apis/dataset_metadata/dataset_funder.py | 4 +++- apis/dataset_metadata/dataset_related_item.py | 4 +++- apis/dataset_metadata/dataset_rights.py | 4 +++- apis/dataset_metadata/dataset_subject.py | 4 +++- apis/dataset_metadata/dataset_title.py | 4 +++- 9 files changed, 31 insertions(+), 13 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 2b9aca39..0f183dc1 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,3 +1,4 @@ +import typing from collections import OrderedDict from flask import g, request @@ -42,7 +43,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify", 403 - data: dict = request.json + data: dict | 
typing.Any= request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] @@ -78,7 +79,7 @@ def put(self, study_id: int, user_id: int): user = model.User.query.get(user_id) if not user: return "user not found", 404 - permission = data["role"] + permission: dict | typing.Any= data["role"] grantee = model.StudyContributor.query.filter( model.StudyContributor.user == user, model.StudyContributor.study == study ).first() @@ -123,7 +124,7 @@ def delete(self, study_id: int, user_id: str): ).first() if not granter: return "you are not contributor of this study", 403 - grants = OrderedDict() + grants: OrderedDict[str, list | list[str]] = OrderedDict() grants["viewer"] = [] grants["editor"] = [] grants["admin"] = ["viewer", "editor"] @@ -133,7 +134,7 @@ def delete(self, study_id: int, user_id: str): invited_grantee = model.StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() - invited_grants = OrderedDict() + invited_grants: OrderedDict[str, list | list[str]] = OrderedDict() invited_grants["viewer"] = [] invited_grants["editor"] = [] invited_grants["admin"] = ["viewer", "editor", "admin"] diff --git a/apis/dataset.py b/apis/dataset.py index 99444ee2..6efdbdf0 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,3 +1,5 @@ +import typing + from flask import Response, jsonify, request from flask_restx import Namespace, Resource, fields @@ -53,7 +55,8 @@ def post(self, study_id): if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 # todo if study.participant id== different study Throw error - dataset_ = model.Dataset.from_data(study, request.json) + data: dict = request.json + dataset_ = model.Dataset.from_data(study, data) model.db.session.add(dataset_) model.db.session.commit() return dataset_.to_dict() @@ -146,7 +149,7 @@ def post(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if 
not is_granted("publish_version", study): return "Access denied, you can not modify", 403 - data = request.json + data: dict | typing.Any = request.json data["participants"] = [ model.Participant.query.get(i) for i in data["participants"] ] diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index d5d6c5b2..0cbe9532 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import request from flask_restx import Resource, fields @@ -27,7 +29,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_identifier_] def post(self, study_id: int, dataset_id: int): - data = request.json + data: Any | dict = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 46253c9a..6bc792ff 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import request from flask_restx import Resource, fields @@ -26,7 +28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_description_] def post(self, study_id: int, dataset_id: int): - data = request.json + data: Any | dict = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 03a840af..8dde61eb 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import request from flask_restx import Resource, fields @@ -31,7 +33,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d 
in dataset_funder_] def post(self, study_id: int, dataset_id: int): - data = request.json + data: Any | dict = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_funder_ = model.DatasetFunder.from_data(data_obj, data) model.db.session.add(dataset_funder_) diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 2aebf8a9..4d5e8e42 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import request from flask_restx import Resource, fields @@ -26,7 +28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_related_item_] def post(self, study_id: int, dataset_id: int): - data = request.json + data: Any | dict = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_related_item_ = model.DatasetRelatedItem.from_data(data_obj, data) model.db.session.add(dataset_related_item_) diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 06b12136..4468d6d3 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import request from flask_restx import Resource, fields @@ -29,7 +31,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_rights_] def post(self, study_id: int, dataset_id: int): - data = request.json + data: Any | dict = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_rights_ = model.DatasetRights.from_data(data_obj, data) model.db.session.add(dataset_rights_) diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index e9cc567d..c1c83049 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import request from 
flask_restx import Resource, fields @@ -30,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_subject_] def post(self, study_id: int, dataset_id: int): - data = request.json + data: Any | dict = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_subject_ = model.DatasetSubject.from_data(data_obj, data) model.db.session.add(dataset_subject_) diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 6b394cb0..876dcefd 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import request from flask_restx import Resource, fields @@ -27,7 +29,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_title_] def post(self, study_id: int, dataset_id: int): - data = request.json + data: Any | dict = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: From d9b831915e47a30dbdd674e4bc2fbc81322a5d18 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 13:32:56 -0700 Subject: [PATCH 279/505] fix: update token exp time --- apis/authentication.py | 19 +++++++++---------- app.py | 22 ++++++++++++---------- 2 files changed, 21 insertions(+), 20 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index f6fd033f..38d1113f 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -2,12 +2,12 @@ import re import uuid from datetime import timezone - +from typing import Any +import config import jwt from flask import g, make_response, request from flask_restx import Namespace, Resource, fields -import config import model api = Namespace("Authentication", description="Authentication paths", path="/") @@ -41,7 +41,7 @@ class SignUpUser(Resource): @api.expect(signup_model) def post(self): """signs up the new users and saves data in DB""" - data: dict = request.json + data: Any | 
dict = request.json # TODO data[email doesnt exist then raise error; json validation library pattern = r"^[\w\.-]+@[\w\.-]+\.\w+$" if not data["email_address"] or not re.match(pattern, data["email_address"]): @@ -72,7 +72,7 @@ class Login(Resource): def post(self): """logs in user and handles few authentication errors. Also, it sets token for logged user along with expiration date""" - data = request.json + data: Any | dict = request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).one_or_none() if not user: @@ -81,13 +81,12 @@ def post(self): if not validate_pass: return "Invalid credentials", 401 else: - if len(config.secret) < 14: - raise "secret key should contain at least 14 characters" + encoded_jwt_code = jwt.encode( { "user": user.id, "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=200), # noqa: W503 + + datetime.timedelta(minutes=180), # noqa: W503 "jti": str(uuid.uuid4()), }, # noqa: W503 config.secret, @@ -97,7 +96,7 @@ def post(self): resp.set_cookie( "token", encoded_jwt_code, secure=True, httponly=True, samesite="lax" ) - resp.status = 200 + resp.status_code = 200 return resp @@ -108,7 +107,7 @@ def authentication(): if "token" not in request.cookies: return - token: str | bytes = request.cookies.get("token") + token: str | None = request.cookies.get("token") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: @@ -220,7 +219,7 @@ def post(self): samesite="lax", expires=datetime.datetime.now(timezone.utc), ) - resp.status = 204 + resp.status_code = 204 return resp diff --git a/app.py b/app.py index aaf200f3..728a860f 100644 --- a/app.py +++ b/app.py @@ -1,7 +1,6 @@ """Entry point for the application.""" import datetime from datetime import timezone - import jwt from flask import Flask, request from flask_bcrypt import Bcrypt @@ -37,8 +36,8 @@ def create_app(): app.config.from_prefixed_env("FAIRHUB") # print(app.config) 
- - # TODO: add a check for secret key + if config.secret and len(config.secret) < 14: + raise "secret key should contain at least 14 characters" # type: ignore # E0702 if "DATABASE_URL" in app.config: # if "TESTING" in app_config and app_config["TESTING"]: @@ -75,11 +74,13 @@ def create_app(): # app.config[ # "CORS_ALLOW_HEADERS" - # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, + # Access-Control-Allow-Credentials" # app.config["CORS_SUPPORTS_CREDENTIALS"] = True # app.config[ # "CORS_EXPOSE_HEADERS" - # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, + # Access-Control-Allow-Credentials" # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) @@ -143,7 +144,7 @@ def on_after_request(resp): resp.delete_cookie("token") return resp expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( - minutes=10 + minutes=180 ) new_token = jwt.encode( {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, @@ -156,10 +157,12 @@ def on_after_request(resp): # resp.headers["Access-Control-Allow-Credentials"] = "true" # resp.headers[ # "Access-Control-Allow-Headers" - # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, + # Access-Control-Allow-Credentials" # resp.headers[ # "Access-Control-Expose-Headers" - # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, + # Access-Control-Allow-Credentials" print(resp.headers) @@ -171,9 +174,9 @@ def validation_exception_handler(error): @app.cli.command("destroy-schema") def destroy_schema(): + """Create the database schema.""" engine = 
model.db.session.get_bind() with engine.begin(): - """Create the database schema.""" model.db.drop_all() with app.app_context(): @@ -184,7 +187,6 @@ def destroy_schema(): # print(table_names) if len(table_names) == 0: with engine.begin(): - """Create the database schema.""" model.db.create_all() return app From cfba6580e5bb178528f77d03c7f62b2609c0d01b Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 13:33:10 -0700 Subject: [PATCH 280/505] fix: typecheck errors --- apis/contributor.py | 12 ++++++------ apis/dataset.py | 2 +- apis/study.py | 5 ++++- model/dataset.py | 2 +- model/participant.py | 4 ++-- model/study.py | 2 +- model/version.py | 2 +- 7 files changed, 16 insertions(+), 13 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index 0f183dc1..46095461 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,4 +1,4 @@ -import typing +from typing import Any, List from collections import OrderedDict from flask import g, request @@ -43,7 +43,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify", 403 - data: dict | typing.Any= request.json + data: dict | Any = request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] @@ -75,11 +75,11 @@ def put(self, study_id: int, user_id: int): "Access denied, you are not authorized to change this permission", 403, ) - data = request.json + data: Any | dict = request.json user = model.User.query.get(user_id) if not user: return "user not found", 404 - permission: dict | typing.Any= data["role"] + permission: Any | str = data["role"] grantee = model.StudyContributor.query.filter( model.StudyContributor.user == user, model.StudyContributor.study == study ).first() @@ -87,7 +87,7 @@ def put(self, study_id: int, user_id: int): model.StudyContributor.user == g.user, 
model.StudyContributor.study == study ).first() # Order should go from the least privileged to the most privileged - grants: OrderedDict[str, list | list[str]] = OrderedDict() + grants: OrderedDict[str, List[str]] = OrderedDict() grants["viewer"] = [] grants["editor"] = ["viewer"] grants["admin"] = ["viewer", "editor", "admin"] @@ -101,7 +101,7 @@ def put(self, study_id: int, user_id: int): # TODO: Owners downgrading themselves if user != g.user: grantee_level = list(grants.keys()).index(grantee.permission) # 1 - new_level = list(grants.keys()).index(permission) # 2 + new_level: int = list(grants.keys()).index(str(permission)) # 2 granter_level = list(grants.keys()).index(granter.permission) # 2 if granter_level <= grantee_level and new_level <= grantee_level: return ( diff --git a/apis/dataset.py b/apis/dataset.py index 6efdbdf0..fee61b7b 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -149,7 +149,7 @@ def post(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("publish_version", study): return "Access denied, you can not modify", 403 - data: dict | typing.Any = request.json + data: typing.Any | dict = request.json data["participants"] = [ model.Participant.query.get(i) for i in data["participants"] ] diff --git a/apis/study.py b/apis/study.py index 85d5ff4b..f7a0ce20 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,3 +1,5 @@ +from typing import Any + from flask import g, request from flask_restx import Namespace, Resource, fields @@ -41,7 +43,8 @@ def get(self): @api.response(200, "Success") @api.response(400, "Validation Error") def post(self): - add_study = model.Study.from_data(request.json) + data: Any | dict = request.json + add_study = model.Study.from_data(data) model.db.session.add(add_study) study_id = add_study.id study_ = model.Study.query.get(study_id) diff --git a/model/dataset.py b/model/dataset.py index 7090d7ac..885509cc 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -106,7 +106,7 @@ 
def last_published(self): ) def last_modified(self): - return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() + return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() # type: ignore @staticmethod def from_data(study: Study, data: dict): diff --git a/model/participant.py b/model/participant.py index 0c71b270..cf69927f 100644 --- a/model/participant.py +++ b/model/participant.py @@ -3,7 +3,7 @@ from datetime import timezone import model - +from .study import Study from .db import db @@ -44,7 +44,7 @@ def to_dict(self): } @staticmethod - def from_data(data: dict, study): + def from_data(data: dict, study: Study): participant = Participant(study) participant.update(data) return participant diff --git a/model/study.py b/model/study.py index 0678e161..be9ae291 100644 --- a/model/study.py +++ b/model/study.py @@ -194,7 +194,7 @@ def update(self, data: dict): def validate(self): """Validates the study""" - violations = [] + violations: list = [] # if self.description.trim() == "": # violations.push("A description is required") # if self.keywords.length < 1: diff --git a/model/version.py b/model/version.py index 005711a4..c7dc0541 100644 --- a/model/version.py +++ b/model/version.py @@ -14,7 +14,7 @@ ) -class Version(db.Model): +class Version(db.Model): # type: ignore def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) From 389dccd7128cfb27d22f62646d98fad83a740b48 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 12 Oct 2023 21:03:04 +0000 Subject: [PATCH 281/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 1 - model/dataset_versions.py | 1 + model/study_contributor.py | 2 +- model/study_metadata/study_identification.py | 1 - 4 files changed, 2 insertions(+), 3 deletions(-) diff --git a/apis/authentication.py 
b/apis/authentication.py index 38d1113f..5736a810 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -81,7 +81,6 @@ def post(self): if not validate_pass: return "Invalid credentials", 401 else: - encoded_jwt_code = jwt.encode( { "user": user.id, diff --git a/model/dataset_versions.py b/model/dataset_versions.py index e0cf9577..926311ef 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -1,5 +1,6 @@ from .version import Version + class DatasetVersions: def __init__( self, diff --git a/model/study_contributor.py b/model/study_contributor.py index 1e651012..1f04b007 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,7 +1,7 @@ import datetime from .user import User -from .study import Study +from .study import Study from .db import db # from datetime import datetime, timezone diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 1ac03348..5f31affb 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -8,7 +8,6 @@ class StudyIdentification(db.Model): # type: ignore - def __init__(self, study, secondary): self.id = str(uuid.uuid4()) self.study = study From 2cb2c4bd5c81c6c222c7f0bc6f8b0dcee5074bb2 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 14:04:36 -0700 Subject: [PATCH 282/505] fix: typecheck errors --- model/dataset_versions.py | 9 +++++---- model/study_contributor.py | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/model/dataset_versions.py b/model/dataset_versions.py index e0cf9577..d98c1145 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -1,5 +1,6 @@ from .version import Version + class DatasetVersions: def __init__( self, @@ -24,10 +25,10 @@ def to_dict(self): @staticmethod def from_data(data: dict): - dataset_versions = DatasetVersions() - dataset_versions.id = data["id"] + dataset_versions = DatasetVersions( + 
id=data["id"], + last_published=data["last_published"], + last_modified=data["last_modified"]) dataset_versions.latest_version = data["latest_version"] - dataset_versions.last_modified = data["last_modified"] - dataset_versions.last_published = data["last_published"] dataset_versions.published_version = data["published_version"] return dataset_versions diff --git a/model/study_contributor.py b/model/study_contributor.py index 1e651012..1f04b007 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,7 +1,7 @@ import datetime from .user import User -from .study import Study +from .study import Study from .db import db # from datetime import datetime, timezone From ea69ad17c863e7040ffb25d691057e33921756d5 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 12 Oct 2023 21:05:23 +0000 Subject: [PATCH 283/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/dataset_versions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/model/dataset_versions.py b/model/dataset_versions.py index d98c1145..3f3fe427 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -28,7 +28,8 @@ def from_data(data: dict): dataset_versions = DatasetVersions( id=data["id"], last_published=data["last_published"], - last_modified=data["last_modified"]) + last_modified=data["last_modified"], + ) dataset_versions.latest_version = data["latest_version"] dataset_versions.published_version = data["published_version"] return dataset_versions From 35ca4fdddc76a1670c63852809f6a8c8093b198d Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 14:29:44 -0700 Subject: [PATCH 284/505] fix: union types --- apis/authentication.py | 8 ++++---- apis/contributor.py | 14 +++++++------- apis/dataset.py | 2 +- .../dataset_alternate_identifier.py | 4 ++-- apis/dataset_metadata/dataset_description.py | 4 ++-- 
apis/dataset_metadata/dataset_funder.py | 4 ++-- apis/dataset_metadata/dataset_related_item.py | 4 ++-- apis/dataset_metadata/dataset_rights.py | 4 ++-- apis/dataset_metadata/dataset_subject.py | 4 ++-- apis/dataset_metadata/dataset_title.py | 4 ++-- apis/study.py | 4 ++-- apis/study_metadata/study_arm.py | 2 +- apis/study_metadata/study_available_ipd.py | 3 +-- apis/study_metadata/study_contact.py | 2 +- apis/study_metadata/study_identification.py | 2 +- apis/study_metadata/study_intervention.py | 2 +- apis/study_metadata/study_link.py | 2 +- apis/study_metadata/study_location.py | 2 +- apis/study_metadata/study_other.py | 2 +- apis/study_metadata/study_overall_official.py | 2 +- apis/study_metadata/study_reference.py | 2 +- .../study_metadata/study_sponsors_collaborators.py | 2 +- app.py | 4 +++- 23 files changed, 42 insertions(+), 41 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 5736a810..33b4758a 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -2,7 +2,7 @@ import re import uuid from datetime import timezone -from typing import Any +from typing import Any, Union import config import jwt from flask import g, make_response, request @@ -41,7 +41,7 @@ class SignUpUser(Resource): @api.expect(signup_model) def post(self): """signs up the new users and saves data in DB""" - data: Any | dict = request.json + data: Union[Any | dict] = request.json # TODO data[email doesnt exist then raise error; json validation library pattern = r"^[\w\.-]+@[\w\.-]+\.\w+$" if not data["email_address"] or not re.match(pattern, data["email_address"]): @@ -72,7 +72,7 @@ class Login(Resource): def post(self): """logs in user and handles few authentication errors. 
Also, it sets token for logged user along with expiration date""" - data: Any | dict = request.json + data: Union[Any | dict] = request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).one_or_none() if not user: @@ -106,7 +106,7 @@ def authentication(): if "token" not in request.cookies: return - token: str | None = request.cookies.get("token") + token: Union[str | None] = request.cookies.get("token") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: diff --git a/apis/contributor.py b/apis/contributor.py index 46095461..0e02b85c 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any, List, Union from collections import OrderedDict from flask import g, request @@ -43,7 +43,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify", 403 - data: dict | Any = request.json + data: Union[dict | Any] = request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] @@ -75,11 +75,11 @@ def put(self, study_id: int, user_id: int): "Access denied, you are not authorized to change this permission", 403, ) - data: Any | dict = request.json + data: Union[Any | dict] = request.json user = model.User.query.get(user_id) if not user: return "user not found", 404 - permission: Any | str = data["role"] + permission: Union[Any | str] = data["role"] grantee = model.StudyContributor.query.filter( model.StudyContributor.user == user, model.StudyContributor.study == study ).first() @@ -87,7 +87,7 @@ def put(self, study_id: int, user_id: int): model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() # Order should go from the least privileged to the most privileged - grants: 
OrderedDict[str, List[str]] = OrderedDict() + grants: Union[OrderedDict[str, List[str]]] = OrderedDict() grants["viewer"] = [] grants["editor"] = ["viewer"] grants["admin"] = ["viewer", "editor", "admin"] @@ -124,7 +124,7 @@ def delete(self, study_id: int, user_id: str): ).first() if not granter: return "you are not contributor of this study", 403 - grants: OrderedDict[str, list | list[str]] = OrderedDict() + grants: Union[OrderedDict[str, list | list[str]]] = OrderedDict() grants["viewer"] = [] grants["editor"] = [] grants["admin"] = ["viewer", "editor"] @@ -134,7 +134,7 @@ def delete(self, study_id: int, user_id: str): invited_grantee = model.StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() - invited_grants: OrderedDict[str, list | list[str]] = OrderedDict() + invited_grants: Union[OrderedDict[str, list | List[str]]] = OrderedDict() invited_grants["viewer"] = [] invited_grants["editor"] = [] invited_grants["admin"] = ["viewer", "editor", "admin"] diff --git a/apis/dataset.py b/apis/dataset.py index fee61b7b..13ffab20 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -149,7 +149,7 @@ def post(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("publish_version", study): return "Access denied, you can not modify", 403 - data: typing.Any | dict = request.json + data: typing.Union[typing.Any | dict] = request.json data["participants"] = [ model.Participant.query.get(i) for i in data["participants"] ] diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 0cbe9532..0575ed73 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Union from flask import request from flask_restx import Resource, fields @@ -29,7 +29,7 @@ def get(self, study_id: int, dataset_id: int): return 
[d.to_dict() for d in dataset_identifier_] def post(self, study_id: int, dataset_id: int): - data: Any | dict = request.json + data: Union[Any | dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 6bc792ff..b0a90acc 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Union from flask import request from flask_restx import Resource, fields @@ -28,7 +28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_description_] def post(self, study_id: int, dataset_id: int): - data: Any | dict = request.json + data: Union[Any | dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 8dde61eb..561015e0 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Union from flask import request from flask_restx import Resource, fields @@ -33,7 +33,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_funder_] def post(self, study_id: int, dataset_id: int): - data: Any | dict = request.json + data: Union[Any | dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_funder_ = model.DatasetFunder.from_data(data_obj, data) model.db.session.add(dataset_funder_) diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 4d5e8e42..b0944e6b 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, 
Union from flask import request from flask_restx import Resource, fields @@ -28,7 +28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_related_item_] def post(self, study_id: int, dataset_id: int): - data: Any | dict = request.json + data: Union[Any | dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_related_item_ = model.DatasetRelatedItem.from_data(data_obj, data) model.db.session.add(dataset_related_item_) diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 4468d6d3..e00e0eda 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Union from flask import request from flask_restx import Resource, fields @@ -31,7 +31,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_rights_] def post(self, study_id: int, dataset_id: int): - data: Any | dict = request.json + data: Union[Any | dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_rights_ = model.DatasetRights.from_data(data_obj, data) model.db.session.add(dataset_rights_) diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index c1c83049..f00a9d3d 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Union from flask import request from flask_restx import Resource, fields @@ -32,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_subject_] def post(self, study_id: int, dataset_id: int): - data: Any | dict = request.json + data: Union[Any | dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_subject_ = model.DatasetSubject.from_data(data_obj, data) model.db.session.add(dataset_subject_) diff --git 
a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 876dcefd..f62cd2cc 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Union from flask import request from flask_restx import Resource, fields @@ -29,7 +29,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_title_] def post(self, study_id: int, dataset_id: int): - data: Any | dict = request.json + data: Union[Any | dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: diff --git a/apis/study.py b/apis/study.py index f7a0ce20..156d9553 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Union from flask import g, request from flask_restx import Namespace, Resource, fields @@ -43,7 +43,7 @@ def get(self): @api.response(200, "Success") @api.response(400, "Validation Error") def post(self): - data: Any | dict = request.json + data: Union[Any | dict] = request.json add_study = model.Study.from_data(data) model.db.session.add(add_study) study_id = add_study.id diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index ed747710..1cffa885 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -49,7 +49,7 @@ def post(self, study_id): study: model.Study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json study_obj = model.Study.query.get(study_id) for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index cb44df71..de3f9417 100644 --- a/apis/study_metadata/study_available_ipd.py +++ 
b/apis/study_metadata/study_available_ipd.py @@ -50,8 +50,7 @@ def post(self, study_id: int): study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data: dict | typing.Any = request.json - + data: typing.Union[dict | typing.Any] = request.json study_obj = model.Study.query.get(study_id) list_of_elements = [] diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 5683624a..5985cc8d 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -47,7 +47,7 @@ def post(self, study_id: int): study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 05589303..f0c8a83e 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -43,7 +43,7 @@ def get(self, study_id: int): @api.expect(study_identification) def post(self, study_id: int): """Create study identification metadata""" - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 8eb08628..f62ffd20 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -49,7 +49,7 @@ def post(self, study_id: int): if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 list_of_elements = [] - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = 
request.json for i in data: if "id" in i and i["id"]: study_intervention_ = model.StudyIntervention.query.get(i["id"]) diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 8d6f9cbb..a57e6b58 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -40,7 +40,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 7148a02f..996a362d 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -47,7 +47,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 3cd9754a..5420f1da 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -70,7 +70,7 @@ def put(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json study_oversight = study_obj.study_other.oversight_has_dmc = data[ "oversight_has_dmc" ] diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index e3e233da..17324cd9 100644 --- 
a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -47,7 +47,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def post(self, study_id: int): """Create study overall official metadata""" - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index c7a7b82b..3efa5752 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -48,7 +48,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: dict | typing.Any = request.json + data: Union[dict | typing.Any] = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index f78fc650..17901804 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -77,7 +77,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def put(self, study_id: int): """updating study collaborators""" - data: dict | typing.Any = request.json + data: typing.Union[dict | typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/app.py b/app.py index 728a860f..8baa5cc4 100644 --- a/app.py +++ b/app.py @@ -1,6 +1,8 @@ """Entry point for the application.""" import datetime from datetime import timezone +from typing import Union + import jwt from flask import Flask, request from 
flask_bcrypt import Bcrypt @@ -126,7 +128,7 @@ def on_after_request(resp): # print(request.cookies.get("token")) if "token" not in request.cookies: return resp - token: str | bytes = request.cookies.get("token") + token: Union[str | bytes] = request.cookies.get("token") try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: From 785af37ec0da5ff79af90960bd6dba2083b11029 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 14:33:01 -0700 Subject: [PATCH 285/505] fix: lint error --- apis/study_metadata/study_reference.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 3efa5752..0a2b4e0a 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -48,7 +48,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: Union[dict | typing.Any] = request.json + data: typing.Union[dict | typing.Any] = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: From 0edb2eddc3554f69538256bc365c1eea0034d446 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 16:50:49 -0700 Subject: [PATCH 286/505] fix: lint and flake8 errors --- apis/authentication.py | 5 +++-- apis/contributor.py | 2 +- apis/dataset.py | 2 +- apis/participant.py | 5 ++++- app.py | 3 +-- model/invited_study_contributor.py | 2 +- model/participant.py | 3 ++- model/study_contributor.py | 4 ++-- model/version.py | 10 ++++++++-- 9 files changed, 23 insertions(+), 13 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 33b4758a..9f50778b 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -3,11 +3,12 @@ import uuid from datetime import timezone from typing import Any, Union -import config + import jwt from flask import g, 
make_response, request from flask_restx import Namespace, Resource, fields +import config import model api = Namespace("Authentication", description="Authentication paths", path="/") @@ -106,7 +107,7 @@ def authentication(): if "token" not in request.cookies: return - token: Union[str | None] = request.cookies.get("token") + token: str = request.cookies.get("token") if request.cookies.get("token") else "" # type: ignore try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: diff --git a/apis/contributor.py b/apis/contributor.py index 0e02b85c..ef07ab98 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,5 +1,5 @@ -from typing import Any, List, Union from collections import OrderedDict +from typing import Any, List, Union from flask import g, request from flask_restx import Namespace, Resource, fields diff --git a/apis/dataset.py b/apis/dataset.py index 13ffab20..68fe6b60 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -55,7 +55,7 @@ def post(self, study_id): if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 # todo if study.participant id== different study Throw error - data: dict = request.json + data: typing.Union[typing.Any | dict] = request.json dataset_ = model.Dataset.from_data(study, data) model.db.session.add(dataset_) model.db.session.commit() diff --git a/apis/participant.py b/apis/participant.py index 015c7c05..e70bd400 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -1,3 +1,5 @@ +from typing import Any, Union + from flask import Response, request from flask_restx import Namespace, Resource, fields @@ -35,10 +37,11 @@ def get(self, study_id: int): @api.response(400, "Validation Error") # @api.marshal_with(participant_model) def post(self, study_id: int): + data: Union[Any | dict] = request.json if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 study = model.Study.query.get(study_id) - add_participant = 
model.Participant.from_data(request.json, study) + add_participant = model.Participant.from_data(data, study) model.db.session.add(add_participant) model.db.session.commit() return add_participant.to_dict(), 201 diff --git a/app.py b/app.py index 8baa5cc4..8da51f92 100644 --- a/app.py +++ b/app.py @@ -1,7 +1,6 @@ """Entry point for the application.""" import datetime from datetime import timezone -from typing import Union import jwt from flask import Flask, request @@ -128,7 +127,7 @@ def on_after_request(resp): # print(request.cookies.get("token")) if "token" not in request.cookies: return resp - token: Union[str | bytes] = request.cookies.get("token") + token: str = request.cookies.get("token") if request.cookies.get("token") else "" # type: ignore try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 77a15de2..0b360288 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,8 +1,8 @@ import datetime import uuid -from .study import Study from .db import db +from .study import Study # from datetime import datetime, timezone diff --git a/model/participant.py b/model/participant.py index cf69927f..669b1834 100644 --- a/model/participant.py +++ b/model/participant.py @@ -3,8 +3,9 @@ from datetime import timezone import model -from .study import Study + from .db import db +from .study import Study class Participant(db.Model): # type: ignore diff --git a/model/study_contributor.py b/model/study_contributor.py index 1f04b007..d277f8b4 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -1,8 +1,8 @@ import datetime -from .user import User -from .study import Study from .db import db +from .study import Study +from .user import User # from datetime import datetime, timezone diff --git a/model/version.py b/model/version.py index c7dc0541..8829659b 100644 --- 
a/model/version.py +++ b/model/version.py @@ -2,11 +2,13 @@ import uuid from datetime import timezone +from sqlalchemy import Table + from model.dataset import Dataset from .db import db -version_participants = db.Table( +version_participants: Table = db.Table( "version_participants", db.Model.metadata, db.Column("dataset_version_id", db.ForeignKey("version.id"), primary_key=True), @@ -44,9 +46,13 @@ def to_dict(self): "created_at": self.created_at, "doi": self.doi, "published": self.published, - "participants": [p.id for p in self.participants], + "participants": [p.id for p in self.participants] + if isinstance(self.participants, (list, set)) + else [], } + # [p.id for p in self.participants] + @staticmethod def from_data(dataset: Dataset, data: dict): dataset_version_obj = Version(dataset) From acd27bba4023e7258a68329ca9d5624c3c217e3f Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 16:59:03 -0700 Subject: [PATCH 287/505] fix: lint and flake8 errors --- apis/dataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/dataset.py b/apis/dataset.py index 68fe6b60..46690870 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -69,7 +69,7 @@ def post(self, study_id): class DatasetResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, dataset_id: int): # pylint: disable EW0613 data_obj = model.Dataset.query.get(dataset_id) return data_obj.to_dict() From 3de32023f0f131822b04142a4a94f2ae07ddace4 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 17:42:31 -0700 Subject: [PATCH 288/505] fix: union types --- apis/authentication.py | 10 ++++++---- apis/contributor.py | 10 +++++----- apis/dataset.py | 12 ++++++------ .../dataset_metadata/dataset_alternate_identifier.py | 2 +- apis/dataset_metadata/dataset_description.py | 2 +- apis/dataset_metadata/dataset_funder.py | 2 +- 
apis/dataset_metadata/dataset_related_item.py | 2 +- apis/dataset_metadata/dataset_rights.py | 2 +- apis/dataset_metadata/dataset_subject.py | 2 +- apis/dataset_metadata/dataset_title.py | 2 +- apis/participant.py | 2 +- apis/study.py | 2 +- apis/study_metadata/study_arm.py | 2 +- apis/study_metadata/study_available_ipd.py | 2 +- apis/study_metadata/study_contact.py | 2 +- apis/study_metadata/study_identification.py | 2 +- apis/study_metadata/study_intervention.py | 2 +- apis/study_metadata/study_link.py | 2 +- apis/study_metadata/study_location.py | 2 +- apis/study_metadata/study_other.py | 4 ++-- apis/study_metadata/study_overall_official.py | 2 +- apis/study_metadata/study_reference.py | 2 +- apis/study_metadata/study_sponsors_collaborators.py | 2 +- apis/user.py | 3 ++- model/dataset.py | 3 ++- model/user.py | 4 ++-- 26 files changed, 44 insertions(+), 40 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 9f50778b..1465d804 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -42,7 +42,7 @@ class SignUpUser(Resource): @api.expect(signup_model) def post(self): """signs up the new users and saves data in DB""" - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json # TODO data[email doesnt exist then raise error; json validation library pattern = r"^[\w\.-]+@[\w\.-]+\.\w+$" if not data["email_address"] or not re.match(pattern, data["email_address"]): @@ -73,7 +73,7 @@ class Login(Resource): def post(self): """logs in user and handles few authentication errors. 
Also, it sets token for logged user along with expiration date""" - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).one_or_none() if not user: @@ -107,7 +107,8 @@ def authentication(): if "token" not in request.cookies: return - token: str = request.cookies.get("token") if request.cookies.get("token") else "" # type: ignore + token: str = request.cookies.get("token") if ( + request.cookies.get("token")) else "" # type: ignore try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: @@ -225,7 +226,8 @@ def post(self): @api.route("/auth/current-users") class CurrentUsers(Resource): - """function is used to see all logged users in the system. For now, it is used for testing purposes""" + """function is used to see all logged users in + the system. For now, it is used for testing purposes""" @api.response(200, "Success") @api.response(400, "Validation Error") diff --git a/apis/contributor.py b/apis/contributor.py index ef07ab98..56912e3e 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -43,7 +43,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify", 403 - data: Union[dict | Any] = request.json + data: Union[dict, Any] = request.json email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] @@ -75,11 +75,11 @@ def put(self, study_id: int, user_id: int): "Access denied, you are not authorized to change this permission", 403, ) - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json user = model.User.query.get(user_id) if not user: return "user not found", 404 - permission: Union[Any | str] = data["role"] + permission: Union[Any, str] = data["role"] grantee = 
model.StudyContributor.query.filter( model.StudyContributor.user == user, model.StudyContributor.study == study ).first() @@ -124,7 +124,7 @@ def delete(self, study_id: int, user_id: str): ).first() if not granter: return "you are not contributor of this study", 403 - grants: Union[OrderedDict[str, list | list[str]]] = OrderedDict() + grants: Union[OrderedDict[str, list[str]]] = OrderedDict() grants["viewer"] = [] grants["editor"] = [] grants["admin"] = ["viewer", "editor"] @@ -134,7 +134,7 @@ def delete(self, study_id: int, user_id: str): invited_grantee = model.StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() - invited_grants: Union[OrderedDict[str, list | List[str]]] = OrderedDict() + invited_grants: Union[OrderedDict[str, List[str]]] = OrderedDict() invited_grants["viewer"] = [] invited_grants["editor"] = [] invited_grants["admin"] = ["viewer", "editor", "admin"] diff --git a/apis/dataset.py b/apis/dataset.py index 46690870..aa6c94c4 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -55,7 +55,7 @@ def post(self, study_id): if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 # todo if study.participant id== different study Throw error - data: typing.Union[typing.Any | dict] = request.json + data: typing.Union[typing.Any, dict] = request.json dataset_ = model.Dataset.from_data(study, data) model.db.session.add(dataset_) model.db.session.commit() @@ -69,7 +69,7 @@ def post(self, study_id): class DatasetResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - def get(self, study_id: int, dataset_id: int): # pylint: disable EW0613 + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument data_obj = model.Dataset.query.get(dataset_id) return data_obj.to_dict() @@ -115,11 +115,11 @@ class VersionResource(Resource): @api.response(400, "Validation Error") @api.doc("dataset version") @api.marshal_with(dataset_versions_model) - 
def get(self, study_id: int, dataset_id: int, version_id: int): + def get(self, study_id: int, dataset_id: int, version_id: int): # pylint: disable= unused-argument dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict() - def put(self, study_id: int, dataset_id: int, version_id: int): + def put(self, study_id: int, dataset_id: int, version_id: int): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("publish_dataset", study): return "Access denied, you can not modify", 403 @@ -128,7 +128,7 @@ def put(self, study_id: int, dataset_id: int, version_id: int): model.db.session.commit() return jsonify(data_version_obj.to_dict()), 201 - def delete(self, study_id: int, dataset_id: int, version_id: int): + def delete(self, study_id: int, dataset_id: int, version_id: int): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 @@ -149,7 +149,7 @@ def post(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("publish_version", study): return "Access denied, you can not modify", 403 - data: typing.Union[typing.Any | dict] = request.json + data: typing.Union[typing.Any, dict] = request.json data["participants"] = [ model.Participant.query.get(i) for i in data["participants"] ] diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 0575ed73..f95804c3 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -29,7 +29,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_identifier_] def post(self, study_id: int, dataset_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = 
[] for i in data: diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index b0a90acc..511a52a6 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -28,7 +28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_description_] def post(self, study_id: int, dataset_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 561015e0..4b2470f3 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -33,7 +33,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_funder_] def post(self, study_id: int, dataset_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_funder_ = model.DatasetFunder.from_data(data_obj, data) model.db.session.add(dataset_funder_) diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index b0944e6b..93b03da8 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -28,7 +28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_related_item_] def post(self, study_id: int, dataset_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_related_item_ = model.DatasetRelatedItem.from_data(data_obj, data) model.db.session.add(dataset_related_item_) diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index e00e0eda..bdfa724e 100644 --- 
a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -31,7 +31,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_rights_] def post(self, study_id: int, dataset_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_rights_ = model.DatasetRights.from_data(data_obj, data) model.db.session.add(dataset_rights_) diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index f00a9d3d..b1fbfe9a 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -32,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_subject_] def post(self, study_id: int, dataset_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) dataset_subject_ = model.DatasetSubject.from_data(data_obj, data) model.db.session.add(dataset_subject_) diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index f62cd2cc..63b7602e 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -29,7 +29,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_title_] def post(self, study_id: int, dataset_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: diff --git a/apis/participant.py b/apis/participant.py index e70bd400..9b069831 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -37,7 +37,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") # @api.marshal_with(participant_model) def post(self, study_id: int): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = 
request.json if is_granted("viewer", study_id): return "Access denied, you can not modify", 403 study = model.Study.query.get(study_id) diff --git a/apis/study.py b/apis/study.py index 156d9553..796e03f5 100644 --- a/apis/study.py +++ b/apis/study.py @@ -43,7 +43,7 @@ def get(self): @api.response(200, "Success") @api.response(400, "Validation Error") def post(self): - data: Union[Any | dict] = request.json + data: Union[Any, dict] = request.json add_study = model.Study.from_data(data) model.db.session.add(add_study) study_id = add_study.id diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 1cffa885..83926b1f 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -49,7 +49,7 @@ def post(self, study_id): study: model.Study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index de3f9417..dd2914f5 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -50,7 +50,7 @@ def post(self, study_id: int): study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) list_of_elements = [] diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 5985cc8d..552b09f4 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -47,7 +47,7 @@ def post(self, study_id: int): study = 
model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index f0c8a83e..f04fd167 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -43,7 +43,7 @@ def get(self, study_id: int): @api.expect(study_identification) def post(self, study_id: int): """Create study identification metadata""" - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index f62ffd20..dc7bd8a9 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -49,7 +49,7 @@ def post(self, study_id: int): if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 list_of_elements = [] - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json for i in data: if "id" in i and i["id"]: study_intervention_ = model.StudyIntervention.query.get(i["id"]) diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index a57e6b58..bd65715e 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -40,7 +40,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json 
list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 996a362d..1b73f275 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -47,7 +47,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 5420f1da..730963d7 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -70,7 +70,7 @@ def put(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json study_oversight = study_obj.study_other.oversight_has_dmc = data[ "oversight_has_dmc" ] @@ -99,7 +99,7 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study conditions metadata""" - data: dict | typing.Any = request.json + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 17324cd9..31ff7872 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -47,7 +47,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def post(self, study_id: int): """Create study overall official metadata""" - 
data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 0a2b4e0a..5ba26b67 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -48,7 +48,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json list_of_elements = [] for i in data: if "id" in i and i["id"]: diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 17901804..2a348599 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -77,7 +77,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def put(self, study_id: int): """updating study collaborators""" - data: typing.Union[dict | typing.Any] = request.json + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/user.py b/apis/user.py index cc7a02e5..a1df656d 100644 --- a/apis/user.py +++ b/apis/user.py @@ -25,7 +25,8 @@ @api.route("/user/profile") class UserDetailsEndpoint(Resource): @api.doc( - description="Returns user details gathered from the user and user_details tables" + description="Returns user details gathered from the" + " user and user_details tables" ) @api.response(200, "Success", study_model) @api.response(400, "Validation Error") diff --git a/model/dataset.py 
b/model/dataset.py index 885509cc..9b39dfa3 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -106,7 +106,8 @@ def last_published(self): ) def last_modified(self): - return self.dataset_versions.order_by(model.Version.updated_on.desc()).first() # type: ignore + return (self.dataset_versions.order_by + (model.Version.updated_on.desc()).first()) # type: ignore @staticmethod def from_data(study: Study, data: dict): diff --git a/model/user.py b/model/user.py index 19b53299..a38a5f6e 100644 --- a/model/user.py +++ b/model/user.py @@ -13,7 +13,7 @@ class User(db.Model): # type: ignore def __init__(self, password, data): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() - self.set_password(password, data) + self.set_password(password) self.user_details = model.UserDetails(self) __tablename__ = "user" @@ -53,7 +53,7 @@ def update(self, data): # self.hash = data["hash"] # self.created_at = data["created_at"] - def set_password(self, password: str, data): + def set_password(self, password: str): """setting bcrypt passwords""" hashed_password = app.bcrypt.generate_password_hash(password).decode("utf-8") self.hash = hashed_password From 199ff1b04e661023b6632e2ea65cadb997eae65f Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 13 Oct 2023 00:43:12 +0000 Subject: [PATCH 289/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 5 +++-- apis/dataset.py | 12 +++++++++--- apis/user.py | 2 +- model/dataset.py | 5 +++-- 4 files changed, 16 insertions(+), 8 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 1465d804..0dc5e105 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -107,8 +107,9 @@ def authentication(): if "token" not in request.cookies: return - token: str = request.cookies.get("token") if ( - 
request.cookies.get("token")) else "" # type: ignore + token: str = ( + request.cookies.get("token") if (request.cookies.get("token")) else "" + ) # type: ignore try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: diff --git a/apis/dataset.py b/apis/dataset.py index aa6c94c4..dcc42351 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -115,11 +115,15 @@ class VersionResource(Resource): @api.response(400, "Validation Error") @api.doc("dataset version") @api.marshal_with(dataset_versions_model) - def get(self, study_id: int, dataset_id: int, version_id: int): # pylint: disable= unused-argument + def get( + self, study_id: int, dataset_id: int, version_id: int + ): # pylint: disable= unused-argument dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict() - def put(self, study_id: int, dataset_id: int, version_id: int): # pylint: disable= unused-argument + def put( + self, study_id: int, dataset_id: int, version_id: int + ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("publish_dataset", study): return "Access denied, you can not modify", 403 @@ -128,7 +132,9 @@ def put(self, study_id: int, dataset_id: int, version_id: int): # pylint: disa model.db.session.commit() return jsonify(data_version_obj.to_dict()), 201 - def delete(self, study_id: int, dataset_id: int, version_id: int): # pylint: disable= unused-argument + def delete( + self, study_id: int, dataset_id: int, version_id: int + ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 diff --git a/apis/user.py b/apis/user.py index a1df656d..cfa3ae29 100644 --- a/apis/user.py +++ b/apis/user.py @@ -26,7 +26,7 @@ class UserDetailsEndpoint(Resource): @api.doc( description="Returns user details gathered from the" - " user and user_details tables" + " user and user_details 
tables" ) @api.response(200, "Success", study_model) @api.response(400, "Validation Error") diff --git a/model/dataset.py b/model/dataset.py index 9b39dfa3..153e985d 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -106,8 +106,9 @@ def last_published(self): ) def last_modified(self): - return (self.dataset_versions.order_by - (model.Version.updated_on.desc()).first()) # type: ignore + return self.dataset_versions.order_by( + model.Version.updated_on.desc() + ).first() # type: ignore @staticmethod def from_data(study: Study, data: dict): From 88234ed5727e60d5d5920ea44b316a88cd7411bd Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 17:46:37 -0700 Subject: [PATCH 290/505] fix: union types --- apis/contributor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/contributor.py b/apis/contributor.py index 56912e3e..e26d2c8c 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -124,7 +124,7 @@ def delete(self, study_id: int, user_id: str): ).first() if not granter: return "you are not contributor of this study", 403 - grants: Union[OrderedDict[str, list[str]]] = OrderedDict() + grants: Union[OrderedDict[str, List[str]]] = OrderedDict() grants["viewer"] = [] grants["editor"] = [] grants["admin"] = ["viewer", "editor"] From c5943a47d0f3c3e059118abc177013ca9f567de8 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 17:50:41 -0700 Subject: [PATCH 291/505] fix: none type --- apis/authentication.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 0dc5e105..ba6153d7 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -108,8 +108,8 @@ def authentication(): if "token" not in request.cookies: return token: str = ( - request.cookies.get("token") if (request.cookies.get("token")) else "" - ) # type: ignore + request.cookies.get("token") if (request.cookies.get("token")) else "" # type: ignore + ) try: decoded = jwt.decode(token, 
config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: From b7ec2dac86c6b69f2691cb548a4b60d23b42807a Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 18:40:58 -0700 Subject: [PATCH 292/505] fix: pylint errors --- apis/authentication.py | 47 +++++++++++++++++++++------------------ apis/contributor.py | 13 +++++++---- apis/dataset.py | 4 ++-- apis/participant.py | 2 +- apis/user.py | 5 ++++- app.py | 10 ++++++--- model/dataset.py | 2 +- model/dataset_versions.py | 2 +- model/study.py | 16 ++++++------- model/user.py | 4 ++-- 10 files changed, 59 insertions(+), 46 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index ba6153d7..d9eedb28 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -81,23 +81,22 @@ def post(self): validate_pass = user.check_password(data["password"]) if not validate_pass: return "Invalid credentials", 401 - else: - encoded_jwt_code = jwt.encode( - { - "user": user.id, - "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=180), # noqa: W503 - "jti": str(uuid.uuid4()), - }, # noqa: W503 - config.secret, - algorithm="HS256", - ) - resp = make_response(user.to_dict()) - resp.set_cookie( - "token", encoded_jwt_code, secure=True, httponly=True, samesite="lax" - ) - resp.status_code = 200 - return resp + encoded_jwt_code = jwt.encode( + { + "user": user.id, + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=180), # noqa: W503 + "jti": str(uuid.uuid4()), + }, # noqa: W503 + config.secret, + algorithm="HS256", + ) + resp = make_response(user.to_dict()) + resp.set_cookie( + "token", encoded_jwt_code, secure=True, httponly=True, samesite="lax" + ) + resp.status_code = 200 + return resp def authentication(): @@ -108,7 +107,9 @@ def authentication(): if "token" not in request.cookies: return token: str = ( - request.cookies.get("token") if (request.cookies.get("token")) else "" # type: ignore + request.cookies.get("token") + if 
(request.cookies.get("token")) + else "" # type: ignore ) try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) @@ -200,10 +201,12 @@ def is_granted(permission: str, study=None): return permission in role[contributor.permission] -def is_study_metadata(study_id: int): - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 +# +# def is_study_metadata(study_id: int): +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not delete study", 403 +# @api.route("/auth/logout") diff --git a/apis/contributor.py b/apis/contributor.py index e26d2c8c..ee388b3c 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -87,7 +87,9 @@ def put(self, study_id: int, user_id: int): model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() # Order should go from the least privileged to the most privileged - grants: Union[OrderedDict[str, List[str]]] = OrderedDict() + grants: Union[ + OrderedDict[str, List[str]] + ] = OrderedDict() # pylint: disable= unsubscriptable-object grants["viewer"] = [] grants["editor"] = ["viewer"] grants["admin"] = ["viewer", "editor", "admin"] @@ -97,7 +99,6 @@ def put(self, study_id: int, user_id: int): if not can_grant: return f"User cannot grant {permission}", 403 - # Granter can not downgrade anyone of equal or greater permissions other than themselves # TODO: Owners downgrading themselves if user != g.user: grantee_level = list(grants.keys()).index(grantee.permission) # 1 @@ -124,7 +125,9 @@ def delete(self, study_id: int, user_id: str): ).first() if not granter: return "you are not contributor of this study", 403 - grants: Union[OrderedDict[str, List[str]]] = OrderedDict() + grants: Union[ + OrderedDict[str, List[str]] + ] = OrderedDict() # pylint: disable= unsubscriptable-object grants["viewer"] = [] grants["editor"] = [] 
grants["admin"] = ["viewer", "editor"] @@ -134,7 +137,9 @@ def delete(self, study_id: int, user_id: str): invited_grantee = model.StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() - invited_grants: Union[OrderedDict[str, List[str]]] = OrderedDict() + invited_grants: Union[ + OrderedDict[str, List[str]] + ] = OrderedDict() # pylint: disable= unsubscriptable-object invited_grants["viewer"] = [] invited_grants["editor"] = [] invited_grants["admin"] = ["viewer", "editor", "admin"] diff --git a/apis/dataset.py b/apis/dataset.py index dcc42351..5f71b0e0 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -55,8 +55,8 @@ def post(self, study_id): if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 # todo if study.participant id== different study Throw error - data: typing.Union[typing.Any, dict] = request.json - dataset_ = model.Dataset.from_data(study, data) + # data: typing.Union[typing.Any, dict] = request.json + dataset_ = model.Dataset.from_data(study) model.db.session.add(dataset_) model.db.session.commit() return dataset_.to_dict() diff --git a/apis/participant.py b/apis/participant.py index 9b069831..71a34253 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -29,7 +29,7 @@ class AddParticipant(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(participant_model) - def get(self, study_id: int): + def get(self, study_id: int): # pylint: disable= unused-argument participants = model.Participant.query.all() return [p.to_dict() for p in participants] diff --git a/apis/user.py b/apis/user.py index cfa3ae29..3084bdda 100644 --- a/apis/user.py +++ b/apis/user.py @@ -1,3 +1,5 @@ +from typing import Any, Union + from flask import g, request from flask_restx import Namespace, Resource, fields @@ -43,7 +45,8 @@ def get(self): @api.marshal_with(study_model) def put(self): """Updates user details""" - data = request.json + data: Union[Any, 
dict] = request.json + if data is None: return {"message": "No data provided"}, 400 user = model.User.query.get(g.user.id) diff --git a/app.py b/app.py index 8da51f92..937a1f6b 100644 --- a/app.py +++ b/app.py @@ -38,7 +38,8 @@ def create_app(): # print(app.config) if config.secret and len(config.secret) < 14: - raise "secret key should contain at least 14 characters" # type: ignore # E0702 + raise RuntimeError("secret key should contain at" + " least 14 characters") if "DATABASE_URL" in app.config: # if "TESTING" in app_config and app_config["TESTING"]: @@ -100,7 +101,7 @@ def create_app(): # model.db.create_all() @app.before_request - def on_before_request(): + def on_before_request(): # pylint: disable = inconsistent-return-statements if request.method == "OPTIONS": return @@ -127,7 +128,10 @@ def on_after_request(resp): # print(request.cookies.get("token")) if "token" not in request.cookies: return resp - token: str = request.cookies.get("token") if request.cookies.get("token") else "" # type: ignore + token: str = ( + request.cookies.get("token") + if request.cookies.get("token") else "" # type: ignore + ) try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) except jwt.ExpiredSignatureError: diff --git a/model/dataset.py b/model/dataset.py index 153e985d..db43a567 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -111,7 +111,7 @@ def last_modified(self): ).first() # type: ignore @staticmethod - def from_data(study: Study, data: dict): + def from_data(study: Study): dataset_obj = Dataset(study) dataset_obj.update() return dataset_obj diff --git a/model/dataset_versions.py b/model/dataset_versions.py index 3f3fe427..1bff5a82 100644 --- a/model/dataset_versions.py +++ b/model/dataset_versions.py @@ -6,7 +6,7 @@ def __init__( self, last_published: Version, last_modified: Version, - id: str, + id: str, # pylint: disable = redefined-builtin ): self.latest_version = last_modified.id self.published_version = last_published.id diff --git 
a/model/study.py b/model/study.py index be9ae291..fe438e60 100644 --- a/model/study.py +++ b/model/study.py @@ -211,9 +211,8 @@ def add_user_to_study(self, user, permission): ).all() if contributor: raise StudyException("User is already exists in study") - else: - contributor = model.StudyContributor(self, user, permission) - db.session.add(contributor) + contributor = model.StudyContributor(self, user, permission) + db.session.add(contributor) return contributor def invite_user_to_study(self, email_address, permission): @@ -224,9 +223,8 @@ def invite_user_to_study(self, email_address, permission): raise StudyException( "This email address has already been invited to this study" ) - else: - contributor_add = model.StudyInvitedContributor( - self, email_address, permission - ) - db.session.add(contributor_add) - return contributor_add + contributor_add = model.StudyInvitedContributor( + self, email_address, permission + ) + db.session.add(contributor_add) + return contributor_add diff --git a/model/user.py b/model/user.py index a38a5f6e..c252b9a2 100644 --- a/model/user.py +++ b/model/user.py @@ -10,7 +10,7 @@ class User(db.Model): # type: ignore - def __init__(self, password, data): + def __init__(self, password): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() self.set_password(password) @@ -39,7 +39,7 @@ def to_dict(self): @staticmethod def from_data(data: dict): - user = User(data["password"], data) + user = User(data["password"]) user.update(data) return user From 04e5ef503135b7c1198ff102a187eaed34183806 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 13 Oct 2023 01:42:02 +0000 Subject: [PATCH 293/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 6 +++--- model/study.py | 4 +--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/app.py 
b/app.py index 937a1f6b..067f24cf 100644 --- a/app.py +++ b/app.py @@ -38,8 +38,7 @@ def create_app(): # print(app.config) if config.secret and len(config.secret) < 14: - raise RuntimeError("secret key should contain at" - " least 14 characters") + raise RuntimeError("secret key should contain at" " least 14 characters") if "DATABASE_URL" in app.config: # if "TESTING" in app_config and app_config["TESTING"]: @@ -130,7 +129,8 @@ def on_after_request(resp): return resp token: str = ( request.cookies.get("token") - if request.cookies.get("token") else "" # type: ignore + if request.cookies.get("token") + else "" # type: ignore ) try: decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) diff --git a/model/study.py b/model/study.py index fe438e60..9a3b1d2a 100644 --- a/model/study.py +++ b/model/study.py @@ -223,8 +223,6 @@ def invite_user_to_study(self, email_address, permission): raise StudyException( "This email address has already been invited to this study" ) - contributor_add = model.StudyInvitedContributor( - self, email_address, permission - ) + contributor_add = model.StudyInvitedContributor(self, email_address, permission) db.session.add(contributor_add) return contributor_add From 78466ddb6b2cae6e55fbc25cce7d2eb7223c7678 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 12 Oct 2023 18:52:34 -0700 Subject: [PATCH 294/505] fix: pylint errors --- apis/contributor.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index ee388b3c..b1df38e0 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,5 +1,5 @@ from collections import OrderedDict -from typing import Any, List, Union +from typing import Any, List, Union, Dict from flask import g, request from flask_restx import Namespace, Resource, fields @@ -87,9 +87,7 @@ def put(self, study_id: int, user_id: int): model.StudyContributor.user == g.user, model.StudyContributor.study == study ).first() # Order should go from 
the least privileged to the most privileged - grants: Union[ - OrderedDict[str, List[str]] - ] = OrderedDict() # pylint: disable= unsubscriptable-object + grants: Dict[str, List[str]] = OrderedDict() grants["viewer"] = [] grants["editor"] = ["viewer"] grants["admin"] = ["viewer", "editor", "admin"] @@ -125,9 +123,7 @@ def delete(self, study_id: int, user_id: str): ).first() if not granter: return "you are not contributor of this study", 403 - grants: Union[ - OrderedDict[str, List[str]] - ] = OrderedDict() # pylint: disable= unsubscriptable-object + grants: Dict[str, List[str]] = OrderedDict() grants["viewer"] = [] grants["editor"] = [] grants["admin"] = ["viewer", "editor"] @@ -137,9 +133,9 @@ def delete(self, study_id: int, user_id: str): invited_grantee = model.StudyInvitedContributor.query.filter_by( study_id=study_id, email_address=user_id ).first() - invited_grants: Union[ - OrderedDict[str, List[str]] - ] = OrderedDict() # pylint: disable= unsubscriptable-object + # invited_grants: Union[OrderedDict + # [str, List[str]]] = OrderedDict() + invited_grants: Dict[str, List[str]] = OrderedDict() invited_grants["viewer"] = [] invited_grants["editor"] = [] invited_grants["admin"] = ["viewer", "editor", "admin"] From 508673ad5f9bf88d3c3c449fb0fd230defc01436 Mon Sep 17 00:00:00 2001 From: aydawka Date: Fri, 13 Oct 2023 10:34:35 -0700 Subject: [PATCH 295/505] fix: pylint errors --- .pylint.ini | 3 ++- app.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.pylint.ini b/.pylint.ini index a84d9038..965d3ef4 100644 --- a/.pylint.ini +++ b/.pylint.ini @@ -408,4 +408,5 @@ known-third-party=enchant # Exceptions that will emit a warning when being caught. 
Defaults to # "Exception" -overgeneral-exceptions=Exception \ No newline at end of file +overgeneral-exceptions=builtins.BaseException, + builtins.Exception \ No newline at end of file diff --git a/app.py b/app.py index 067f24cf..f302721c 100644 --- a/app.py +++ b/app.py @@ -38,7 +38,7 @@ def create_app(): # print(app.config) if config.secret and len(config.secret) < 14: - raise RuntimeError("secret key should contain at" " least 14 characters") + raise RuntimeError("secret key should contain at least 14 characters") if "DATABASE_URL" in app.config: # if "TESTING" in app_config and app_config["TESTING"]: From 9d0d5d5070697c4ff6110454684dc8104e4197dc Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Fri, 13 Oct 2023 15:10:52 -0700 Subject: [PATCH 296/505] feat: pytest (#13) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: allow config from pytest to be ran in app.py * dx: ignore deprecation warnings with pytest * feat: update tests for server, study api * dx: ignore dependency warnings for all pytest commands * style: 🎨 fix code style issues with Black * wip: adding more tests to pytest * feat: added pyfixture for test_client * feat: have create_app use pytest-config or config based on testing * feat: use pytest-config for pytest * chore: clean app.py * feat: setup tests and fixtures for study api, create dataset api tests * wip: share pytest fixtures across tests with updated value * style: 🎨 fix code style issues with Black * feat: clear database fixture for testing * feat: pytest functions added for study, study metadata, datasets * style: 🎨 fix code style issues with Black * fix: study arm from new entry is patched * fix: patched and cleaned arm metadata api (post) * chore: clean test study api file * feat: track arm id * feat: tests added for central contact, ipd, arm metadata * feat: track arm id through testing * feat: added file for dataset metadata testing * fix: 
removed for loop and just check for id one time for POST cc endpoint * feat: tests added for collab, cond, desc, design, eligi, id, inter, ipd, link * style: 🎨 fix code style issues with Black * fix: central contact tests fixed, one found bug in response * feat: global variables added for study metadata tests * chore: remove print statements * refactor: /study POST endpoint returns content rather than 204 * wip: modifying login to handle config file for pytest * wip: reworking pytest to login in user once for testing * style: 🎨 fix code style issues with Black * refactor: allow login to use different configuration for testing * style: 🎨 fix code style issues with Black * refactor: use FAIRHUB_SECRET rather than secret for configuration * refactor: create user right after database is cleared * refactor: isort tests folder * fix: resolve unused arguments for pylint * chore: remove prints * feat: add description and payload requirements to ipd endpoint in swagger * feat: add description and payload requirements to arm endpoint in swagger * feat: add more tests for pytest * style: 🎨 fix code style issues with Black * feat: POST, PUT, DELETE tests added for study metadata * style: 🎨 fix code style issues with Black * feat: more tests added for study metadata * style: 🎨 fix code style issues with Black * fix: fix intervention metadata test * chore: gather information on endpoints that aren't working * fix: failed tests corrected * 🚨 fix: fix all linter warnings * 🐛 fix: update secret config * 👷 ci: add test workflow * 💚 test: ci workflow --------- Co-authored-by: Lint Action Co-authored-by: Sanjay Soundarajan --- .github/workflows/test.yml | 39 + apis/authentication.py | 49 +- apis/contributor.py | 13 +- apis/study.py | 20 +- apis/study_metadata/study_arm.py | 2 +- apis/study_metadata/study_available_ipd.py | 5 +- apis/study_metadata/study_reference.py | 3 - app.py | 35 +- config.py | 2 +- db-docker-compose.yaml | 4 +- model/study.py | 2 +- pyproject.toml | 4 +- 
pytest_config.py | 12 + tests/conftest.py | 84 +- tests/functional/test_server_launch.py | 28 + tests/functional/test_study_api.py | 96 +- tests/functional/test_study_dataset_api.py | 37 + .../test_study_dataset_metadata_api.py | 1 + tests/functional/test_study_metadata_api.py | 925 ++++++++++++++++++ tests/unit/test_study_models.py | 21 +- 20 files changed, 1304 insertions(+), 78 deletions(-) create mode 100644 .github/workflows/test.yml create mode 100644 pytest_config.py create mode 100644 tests/functional/test_server_launch.py create mode 100644 tests/functional/test_study_dataset_api.py create mode 100644 tests/functional/test_study_dataset_metadata_api.py create mode 100644 tests/functional/test_study_metadata_api.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..1f9a28ad --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,39 @@ +name: Test + +on: [push] + +jobs: + lint: + name: Run tests + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8"] + env: + # These are simulated secrets for test workflow only. 
+ FAIRHUB_DATABASE_URL: postgresql://admin:root@localhost:5432/fairhub_local + FAIRHUB_SECRET: mXrkOHXXQoMAhCOTZOV93QlncmeTwEZFPxTP1TXGiOFabE0KmuZgHWvTOLgjbv3S + + steps: + - uses: actions/checkout@v2 + + - name: Crate a database for tests with docker + run: docker run --name postgres -p 5432:5432 -e POSTGRES_USER=admin -e POSTGRES_PASSWORD=root -e POSTGRES_DB=fairhub_local -d postgres:latest + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - uses: Gr1N/setup-poetry@v8 + + - name: Install dependencies + run: poetry install + + - uses: actions/cache@v2 + with: + path: .venv + key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} + + - name: Run pytest (with capture) + run: poetry run pytest -s -W ignore::DeprecationWarning diff --git a/apis/authentication.py b/apis/authentication.py index d9eedb28..719eb633 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -1,5 +1,9 @@ import datetime +import importlib +import os import re + +# import config import uuid from datetime import timezone from typing import Any, Union @@ -8,7 +12,6 @@ from flask import g, make_response, request from flask_restx import Namespace, Resource, fields -import config import model api = Namespace("Authentication", description="Authentication paths", path="/") @@ -74,13 +77,35 @@ def post(self): """logs in user and handles few authentication errors. 
Also, it sets token for logged user along with expiration date""" data: Union[Any, dict] = request.json + email_address = data["email_address"] + user = model.User.query.filter_by(email_address=email_address).one_or_none() + if not user: return "Invalid credentials", 401 + validate_pass = user.check_password(data["password"]) + if not validate_pass: return "Invalid credentials", 401 + + # Determine the appropriate configuration module + # based on the testing context + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + + config_module = importlib.import_module(config_module_name) + + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module + encoded_jwt_code = jwt.encode( { "user": user.id, @@ -88,14 +113,17 @@ def post(self): + datetime.timedelta(minutes=180), # noqa: W503 "jti": str(uuid.uuid4()), }, # noqa: W503 - config.secret, + config.FAIRHUB_SECRET, algorithm="HS256", ) + resp = make_response(user.to_dict()) + resp.set_cookie( "token", encoded_jwt_code, secure=True, httponly=True, samesite="lax" ) resp.status_code = 200 + return resp @@ -111,8 +139,21 @@ def authentication(): if (request.cookies.get("token")) else "" # type: ignore ) + + # Determine the appropriate configuration module based on the testing context + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + config_module = importlib.import_module(config_module_name) + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module try: - decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) + decoded = 
jwt.decode(token, config.FAIRHUB_SECRET, algorithms=["HS256"]) except jwt.ExpiredSignatureError: return token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) @@ -132,7 +173,7 @@ def authorization(): "/swaggerui", "/swagger.json", ] - print("g.user", g.user) + for route in public_routes: if request.path.startswith(route): return diff --git a/apis/contributor.py b/apis/contributor.py index b1df38e0..9338a50b 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,5 +1,5 @@ from collections import OrderedDict -from typing import Any, List, Union, Dict +from typing import Any, Dict, List, Union from flask import g, request from flask_restx import Namespace, Resource, fields @@ -140,24 +140,33 @@ def delete(self, study_id: int, user_id: str): invited_grants["editor"] = [] invited_grants["admin"] = ["viewer", "editor", "admin"] invited_grants["owner"] = ["editor", "viewer", "admin"] + can_delete = ( invited_grantee.permission in invited_grants[granter.permission] ) + if not can_delete: return f"User cannot delete {invited_grantee.permission}", 403 + model.db.session.delete(invited_grantee) + model.db.session.commit() + return 204 + user = model.User.query.get(user_id) + if not user: return "user is not found", 404 + contributors = model.StudyContributor.query.filter( model.StudyContributor.study == study ).all() - print(len(contributors), "") + grantee = model.StudyContributor.query.filter( model.StudyContributor.user == user, model.StudyContributor.study == study ).first() + if len(contributors) <= 1: return "the study must have at least one contributor", 422 if grantee.user == granter.user: diff --git a/apis/study.py b/apis/study.py index 796e03f5..2166d441 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,7 +1,7 @@ from typing import Any, Union from flask import g, request -from flask_restx import Namespace, Resource, fields +from flask_restx import Namespace, Resource, fields, reqparse import model @@ -21,7 +21,19 @@ @api.route("/study") class 
Studies(Resource): - @api.doc("list_study") + parser_study = reqparse.RequestParser(bundle_errors=True) + parser_study.add_argument( + "title", type=str, required=True, location="json", help="The title of the Study" + ) + parser_study.add_argument( + "image", + type=list, + required=True, + location="json", + help="The image for the Study", + ) + + @api.doc(description="Return a list of all studies") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study_model) @@ -56,7 +68,7 @@ def post(self): @api.route("/study/") class StudyResource(Resource): - @api.doc("get study") + @api.doc(description="Get a study's details") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study) @@ -67,6 +79,7 @@ def get(self, study_id: int): @api.expect(study_model) @api.response(200, "Success") @api.response(400, "Validation Error") + @api.doc(description="Update a study's details") def put(self, study_id: int): update_study = model.Study.query.get(study_id) if not is_granted("update_study", update_study): @@ -78,6 +91,7 @@ def put(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.doc(description="Delete a study") def delete(self, study_id: int): study = model.Study.query.get(study_id) if not is_granted("delete_study", study): diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 83926b1f..60a11fb0 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -37,7 +37,7 @@ class StudyArmResource(Resource): @api.response(400, "Validation Error") # @api.marshal_with(study_arm) def get(self, study_id): - """Get study arm metadata""" + """Get all Arms for a study""" study_ = model.Study.query.get(study_id) arm = model.Arm(study_) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index dd2914f5..abc590fb 100644 --- 
a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -41,9 +41,12 @@ def get(self, study_id: int): return [s.to_dict() for s in sorted_study_available_ipd] - @api.doc("update available") + @api.doc( + description="An array of objects are expected within the payload with the keys demonstrated below to create an available-ipd" # noqa E501 + ) @api.response(200, "Success") @api.response(400, "Validation Error") + @api.expect(study_available) @api.marshal_with(study_available) def post(self, study_id: int): """Create study available metadata""" diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 5ba26b67..c3362dbd 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -36,9 +36,6 @@ def get(self, study_id: int): study_reference_ = study_.study_reference - # todo: remove print - print(study_.study_reference) - sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_reference] diff --git a/app.py b/app.py index f302721c..6a921349 100644 --- a/app.py +++ b/app.py @@ -1,5 +1,7 @@ """Entry point for the application.""" import datetime +import importlib +import os from datetime import timezone import jwt @@ -19,17 +21,18 @@ bcrypt = Bcrypt() -def create_app(): +def create_app(config_module=None): """Initialize the core application.""" # create and configure the app app = Flask(__name__) # `full` if you want to see all the details app.config["SWAGGER_UI_DOC_EXPANSION"] = "none" app.config["RESTX_MASK_SWAGGER"] = False + # Initialize config - app.config.from_pyfile("config.py") - # app.register_blueprint(api) + app.config.from_object(config_module or "config") + # app.register_blueprint(api) # TODO - fix this # csrf = CSRFProtect() # csrf.init_app(app) @@ -37,7 +40,7 @@ def create_app(): app.config.from_prefixed_env("FAIRHUB") # print(app.config) - if config.secret and 
len(config.secret) < 14: + if config.FAIRHUB_SECRET and len(config.FAIRHUB_SECRET) < 14: raise RuntimeError("secret key should contain at least 14 characters") if "DATABASE_URL" in app.config: @@ -127,13 +130,27 @@ def on_after_request(resp): # print(request.cookies.get("token")) if "token" not in request.cookies: return resp + token: str = ( request.cookies.get("token") if request.cookies.get("token") else "" # type: ignore ) + + # Determine the appropriate configuration module based on the testing context + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + config_module = importlib.import_module(config_module_name) + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module try: - decoded = jwt.decode(token, config.secret, algorithms=["HS256"]) + decoded = jwt.decode(token, config.FAIRHUB_SECRET, algorithms=["HS256"]) except jwt.ExpiredSignatureError: resp.set_cookie( "token", @@ -153,7 +170,7 @@ def on_after_request(resp): ) new_token = jwt.encode( {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, - config.secret, + config.FAIRHUB_SECRET, algorithm="HS256", ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") @@ -169,7 +186,7 @@ def on_after_request(resp): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" - print(resp.headers) + # print(resp.headers) return resp @@ -206,6 +223,6 @@ def destroy_schema(): args = parser.parse_args() port = args.port - app = create_app() + flask_app = create_app() - app.run(host="0.0.0.0", port=port) + flask_app.run(host="0.0.0.0", port=port) diff --git a/config.py b/config.py index db074343..f1d0304b 100644 --- a/config.py +++ b/config.py @@ -2,4 +2,4 @@ from os import environ 
FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") -secret = environ.get("secret") +FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") diff --git a/db-docker-compose.yaml b/db-docker-compose.yaml index 3961f8f0..6bb04c04 100644 --- a/db-docker-compose.yaml +++ b/db-docker-compose.yaml @@ -9,5 +9,5 @@ services: POSTGRES_DB: fairhub_local ports: - 5432:5432 - # volumes: - # - ./postgres-data:/var/lib/postgresql/data \ No newline at end of file + volumes: + - ./postgres-data:/var/lib/postgresql/data diff --git a/model/study.py b/model/study.py index 9a3b1d2a..a8546fff 100644 --- a/model/study.py +++ b/model/study.py @@ -158,7 +158,7 @@ def to_dict(self): contributor_permission = self.study_contributors.filter( model.StudyContributor.user_id == g.user.id ).first() - print(contributor_permission) + return { "id": self.id, "title": self.title, diff --git a/pyproject.toml b/pyproject.toml index 0195d31f..c007db2b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -114,8 +114,8 @@ lint = ["flake8", "typecheck", "pylint"] precommit = ["format", "flake8", "typecheck", "pylint"] -test = "pytest -rx" -test_with_capture = "pytest -s" +test = "pytest -rx -W ignore::DeprecationWarning" +test_with_capture = "pytest -s -W ignore::DeprecationWarning" jupyter = "jupyter notebook" diff --git a/pytest_config.py b/pytest_config.py new file mode 100644 index 00000000..a6173df4 --- /dev/null +++ b/pytest_config.py @@ -0,0 +1,12 @@ +"""Configuration for testing the application.""" +from os import environ +from dotenv import load_dotenv + +# Load environment variables from .env +load_dotenv(".env") + + +class TestConfig: + FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") + FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") + TESTING = True diff --git a/tests/conftest.py b/tests/conftest.py index e760ed8e..bfaf8cd5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,38 +1,82 @@ """Defines fixtures available to all tests.""" +import os +import unittest.mock + import pytest 
+from dotenv import load_dotenv from app import create_app +from model.db import db +from pytest_config import TestConfig + +# Load environment variables from .env +load_dotenv(".env") + +# Set the FLASK_ENV environment variable to "testing" +os.environ["FLASK_ENV"] = "testing" +# Set global variable for study ID +pytest.global_study_id = {} +pytest.global_dataset_id = "" +pytest.global_version_id = "" +pytest.global_arm_id = "" +pytest.global_available_ipd_id = "" +pytest.global_cc_id = "" +pytest.global_identification_id = "" +pytest.global_intervention_id = "" +pytest.global_link_id = "" +pytest.global_location_id = "" +pytest.global_overall_official_id = "" +pytest.global_reference_id = "" + +# Create the flask app for testing @pytest.fixture() -def app(): +def flask_app(): """An application for the tests.""" - # config = { - # "TESTING": True, - # } - - flask_app = create_app() + yield create_app(config_module="pytest_config") - flask_app.config.update( - { - "TESTING": True, - } - ) - # other setup can go here +# Create a test client for the app +@pytest.fixture() +def _test_client(flask_app): + """A test client for the app.""" + with flask_app.test_client() as _test_client: + yield _test_client - yield flask_app - # clean up / reset resources here +# Empty local database for testing +@pytest.fixture() +def _empty_db(flask_app): + """Empty the local database.""" + with flask_app.app_context(): + meta = db.metadata + for table in reversed(meta.sorted_tables): + # print(f"Clear table {table}") + db.session.execute(table.delete()) + db.session.commit() @pytest.fixture() -def client(flask_app): - """A test client for the app.""" - return flask_app.test_client() +def _create_user(_test_client): + """Create a user for testing.""" + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): + response = _test_client.post( + "/auth/signup", + json={"email_address": "sample@gmail.com", "password": "test"}, + ) + assert response.status_code == 201 + +# Fixture 
to sign in the user for module testing @pytest.fixture() -def runner(flask_app): - """A test runner for the app's Click commands.""" - return flask_app.test_cli_runner() +def _login_user(_test_client): + """Sign in the user for testing.""" + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): + response = _test_client.post( + "/auth/login", + json={"email_address": "sample@gmail.com", "password": "test"}, + ) + + assert response.status_code == 200 diff --git a/tests/functional/test_server_launch.py b/tests/functional/test_server_launch.py new file mode 100644 index 00000000..a7b65e02 --- /dev/null +++ b/tests/functional/test_server_launch.py @@ -0,0 +1,28 @@ +"""Tests for API endpoints related to server launch""" +import json + + +def test_server_launch(_test_client): + """ + GIVEN a Flask application configured for testing + WHEN the '/echo' endpoint is requested (GET) + THEN check that the response shows that the server is active + """ + # Create a test client using the Flask application configured for testing + response = _test_client.get("/echo") + + # Temporary test until we have authentication + # assert response.status_code == 403 + + # Convert the response data from JSON to a Python dictionary + response_data = json.loads(response.data) + + # Check the response is correct + assert response_data == "Server active!" 
+ + +# Empty database before testing and create a user for testing +def test_db_empty(_test_client, _empty_db, _create_user): + """Test that the database is empty.""" + print("Database empty") + print("User created for testing") diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index eadc6fa1..0871f278 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -1,23 +1,95 @@ """Tests for API endpoints related to studies""" -import os +import json -from app import create_app +import pytest -def test_should_return_studies(): +def test_post_study(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study + WHEN the '/study' endpoint is requested (POST) + THEN check that the response is valid + """ + # Crate a test using the Flask application configured for testing + response = _test_client.post( + "/study", + json={ + "title": "Study Title", + "image": "https://api.dicebear.com/6.x/adventurer/svg", + }, + ) + response_data = json.loads(response.data) + + assert response.status_code == 200 + assert response_data["title"] == "Study Title" + assert response_data["image"] == "https://api.dicebear.com/6.x/adventurer/svg" + pytest.global_study_id = response_data + + +def test_get_all_studies(_test_client, _login_user): """ GIVEN a Flask application configured for testing WHEN the '/study' endpoint is requested (GET) THEN check that the response is valid """ + response = _test_client.get("/study") + + response_data = json.loads(response.data) + assert len(response_data) == 1 # Only one study created + assert response.status_code == 200 + + +def test_update_study(_test_client, _login_user): + """ + GIVEN a study ID + WHEN the '/study' endpoint is requested (PUT) + THEN check that the study is updated with the inputed data + """ + # study_id = pytest.global_study_id["id"] + # response = _test_client.put( + # f"/study/{study_id}", + # json={ + # "id": 
pytest.global_study_id["id"], + # "title": "Study Title Updated", + # "image": pytest.global_study_id["image"], + # }, + # ) + # response_data = json.loads(response.data) - # Set the environment to testing - os.environ["FLASK_ENV"] = "testing" - flask_app = create_app() + # assert response.status_code == 200 + # assert response_data["title"] == "Study Title Updated" + # assert response_data["image"] == pytest.global_study_id["image"] + # assert response_data["id"] == pytest.global_study_id["id"] + # pytest.global_study_id = response_data - # Create a test client using the Flask application configured for testing - with flask_app.test_client() as test_client: - response = test_client.get("/study") - # Convert the response data from JSON to a Python dictionary - # Check the response is correct - assert response.status_code == 200 + +def test_get_study_by_id(_test_client, _login_user): + """ + GIVEN a study ID + WHEN the '/study/{study_id}' endpoint is requested (GET) + THEN check that the response is valid + """ + response = _test_client.get(f"/study/{pytest.global_study_id['id']}") # type: ignore # pylint: disable=line-too-long # noqa: E501 + + # Convert the response data from JSON to a Python dictionary + response_data = json.loads(response.data) + + # Check the response is correct + assert response.status_code == 200 + assert response_data["id"] == pytest.global_study_id["id"] # type: ignore + assert response_data["title"] == pytest.global_study_id["title"] # type: ignore + assert response_data["image"] == pytest.global_study_id["image"] # type: ignore + + +def test_delete_studies_created(_test_client, _login_user): + """ + Given a Flask application configured for testing + WHEN the '/study' endpoint is requested (DELETE) + THEN check that the response is valid (200) + THEN the '/study' endpoint is requested (GET) + THEN check if the study created has been deleted + """ + print("delete study created") + # TODO: DELETE ENDPOINT NOT WORKING + # with 
flask_app._test_client() as _test_client: + # response = _test_client.post("/study", json={ diff --git a/tests/functional/test_study_dataset_api.py b/tests/functional/test_study_dataset_api.py new file mode 100644 index 00000000..8d03b7d6 --- /dev/null +++ b/tests/functional/test_study_dataset_api.py @@ -0,0 +1,37 @@ +"""Tests for API endpoints related to datasets""" +import json + +import pytest + + +def test_get_all_dataset_from_study(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/dataset/{study_id}' endpoint is requested (GET) + THEN check that the response is valid and retrieves the dataset content + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/dataset") + response_data = json.loads(response.data) + assert response.status_code == 200 + print(response_data) + + +def test_post_dataset(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/dataset/{study_id}' endpoint is requested (POST) + THEN check that the response is valid and creates a dataset + """ + # study_id = pytest.global_study_id["id"] + # response = _test_client.post( + # f"/study/{study_id}/dataset", + # json={ + # "id": study_id, + # }, + # ) + + # assert response.status_code == 200 + # response_data = json.loads(response.data) + # pytest.global_dataset_id = response_data["id"] + # print(pytest.global_dataset_id) diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py new file mode 100644 index 00000000..0d50d2ae --- /dev/null +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -0,0 +1 @@ +"""Tests for the Dataset's Metadata API endpoints""" diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py new file mode 100644 index 00000000..aeba64d2 --- /dev/null +++ 
b/tests/functional/test_study_metadata_api.py @@ -0,0 +1,925 @@ +"""Tests for the Study Metadata API endpoints""" +import json + +import pytest + + +# ------------------- ARM METADATA ------------------- # +def test_post_arm_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/arm' endpoint is requested (POST) + THEN check that the response is vaild and create a new arm + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/arm", + json=[ + { + "label": "Label1", + "type": "Arm Type", + "description": "Arm Description", + "intervention_list": ["intervention1", "intervention2"], + } + ], + ) + + response_data = json.loads(response.data) + assert response.status_code == 200 + assert response_data["arms"][0]["label"] == "Label1" + assert response_data["arms"][0]["type"] == "Arm Type" + assert response_data["arms"][0]["description"] == "Arm Description" + assert response_data["arms"][0]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + pytest.global_arm_id = response_data["arms"][0]["id"] + + +def test_get_arm_metadata(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/arm/metadata' endpoint is requested (GET) + THEN check that the response is valid and retrieves the arm metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/arm") + response_data = json.loads(response.data) + assert response.status_code == 200 + assert response_data["arms"][0]["label"] == "Label1" + assert response_data["arms"][0]["type"] == "Arm Type" + assert response_data["arms"][0]["description"] == "Arm Description" + assert response_data["arms"][0]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + + +def test_delete_arm_metadata(_test_client, 
_login_user): + """ + GIVEN a Flask application configured for testing and a study ID and arm ID + WHEN the '/study/{study_id}/arm/metadata' endpoint is requested (DELETE) + THEN check that the response is valid and deletes the arm metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + arm_id = pytest.global_arm_id + response = _test_client.delete(f"/study/{study_id}/metadata/arm/{arm_id}") + assert response.status_code == 200 + + +# ------------------- IPD METADATA ------------------- # +def test_post_available_ipd_metadata(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (POST) + THEN check that the response is vaild and new IPD was created + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/available-ipd", + json=[ + { + "identifier": "identifier1", + "type": "type1", + "url": "google.com", + "comment": "comment1", + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_available_ipd_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "identifier1" + assert response_data[0]["type"] == "type1" + assert response_data[0]["url"] == "google.com" + assert response_data[0]["comment"] == "comment1" + + +def test_get_available_ipd_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (GET) + THEN check that the response is vaild and retrieves the available IPD(s) + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/available-ipd") + assert response.status_code == 200 + + +def test_delete_available_ipd_metadata(_test_client, _login_user): + """ + Given a Flask application configured for 
testing and a study ID and available IPD ID + WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (DELETE) + THEN check that the response is vaild and deletes the available IPD + """ + study_id = pytest.global_study_id["id"] # type: ignore + available_ipd_id = pytest.global_available_ipd_id + response = _test_client.delete( + f"/study/{study_id}/metadata/available-ipd/{available_ipd_id}" + ) + assert response.status_code == 200 + + +# ------------------- CENTRAL CONTACT METADATA ------------------- # +def test_post_cc_metadata(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/central-contact' endpoint is requested (POST) + THEN check that the response is valid and creates the central contact metadata + """ + # BUG: ROLE IS RETURNED AS NONE + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/central-contact", + json=[ + { + "name": "central-contact", + "affiliation": "affiliation", + "role": "role", + "phone": "phone", + "phone_ext": "phone_ext", + "email_address": "email_address", + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_cc_id = response_data[0]["id"] + print("$$$$$$$$$") + print(response_data) + print("$$$$$$$$$") + + assert response_data[0]["name"] == "central-contact" + assert response_data[0]["affiliation"] == "affiliation" + # assert response_data[0]["role"] == "role" + assert response_data[0]["phone"] == "phone" + assert response_data[0]["phone_ext"] == "phone_ext" + assert response_data[0]["email_address"] == "email_address" + assert response_data[0]["central_contact"] is True + + +def test_get_cc_metadata(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/central-contact' endpoint is requested (GET) + THEN check that the 
response is valid and retrieves the central contact metadata + """ + # BUG: ROLE IS RETURNED AS NONE + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/central-contact") + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data[0]["name"] == "central-contact" + assert response_data[0]["affiliation"] == "affiliation" + # assert response_data[0]["role"] == "role" + assert response_data[0]["phone"] == "phone" + assert response_data[0]["phone_ext"] == "phone_ext" + assert response_data[0]["email_address"] == "email_address" + assert response_data[0]["central_contact"] is True + + +def test_delete_cc_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + and central contact ID + WHEN the '/study/{study_id}/metadata/central-contact/{central_contact_id}' + endpoint is requested (DELETE) + THEN check that the response is valid and deletes the central contact metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + central_contact_id = pytest.global_cc_id + response = _test_client.delete( + f"/study/{study_id}/metadata/central-contact/{central_contact_id}" + ) + assert response.status_code == 200 + + +# ------------------- COLLABORATORS METADATA ------------------- # +def test_get_collaborators_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (GET) + THEN check that the response is valid and retrieves the collaborators metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/collaborators") + assert response.status_code == 200 + + +def test_put_collaborators_metadata(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the 
'/study/{study_id}/metadata/collaborators' + endpoint is requested (POST) + THEN check that the response is valid and creates the collaborators metadata + """ + # BUG: ENDPOINT STORES KEY RATHER THAN VALUE + # RETURNS ['collaborator_name'] rather than ['collaborator'] + # (so it is storing the key rather than the value) + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/collaborators", + json=[ + "collaborator", + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data[0] == "collaborator" + + +# ------------------- CONDITIONS METADATA ------------------- # +def test_get_conditions_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) + THEN check that the response is valid and retrieves the conditions metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/conditions") + assert response.status_code == 200 + + +def test_put_conditions_metadata(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (POST) + THEN check that the response is valid and creates the conditions metadata + """ + # BUG: ENDPOINT STORES KEY RATHER THAN VALUE + # RESPONSE FOR THIS TEST LOOKS LIKE + # ['conditions', 'keywords', 'oversight_has_dmc', 'size'] + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/conditions", + json=[ + True, + "conditions string", + "keywords string", + "size string", + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + print("$$$$$$") + print(response_data) + print("$$$$$$") + + assert response_data[0] == "true" + 
assert response_data[1] == "conditions string" + assert response_data[2] == "keywords string" + assert response_data[3] == "size string" + + +# ------------------- DESCRIPTION METADATA ------------------- # +def test_get_description_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/description' endpoint is requested (GET) + THEN check that the response is valid and retrieves the description metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/description") + assert response.status_code == 200 + + +def test_put_description_metadata(_test_client, _login_user): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/description' endpoint is requested (POST) + THEN check that the response is valid and creates the description metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/description", + json={ + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_description_id = response_data["id"] + + assert response_data["brief_summary"] == "brief_summary" + assert response_data["detailed_description"] == "detailed_description" + + +# ------------------- DESIGN METADATA ------------------- # +def test_get_design_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/design' endpoint is requested (GET) + THEN check that the response is valid and retrieves the design metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/design") + assert response.status_code == 200 + + 
+def test_put_design_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/design' endpoint is requested (PUT) + THEN check that the response is valid and creates the design metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/design", + json={ + "design_allocation": "dfasdfasd", + "study_type": "dffad", + "design_intervention_model": "eredf", + "design_intervention_model_description": "dfadf", + "design_primary_purpose": "dfasder", + "design_masking": "dfdasdf", + "design_masking_description": "tewsfdasf", + "design_who_masked_list": ["one", "two"], + "phase_list": ["three", "four"], + "enrollment_count": 3, + "enrollment_type": "dfasdf", + "number_arms": 2, + "design_observational_model_list": ["yes", "dfasd"], + "design_time_perspective_list": ["uhh"], + "bio_spec_retention": "dfasdf", + "bio_spec_description": "dfasdf", + "target_duration": "rewrwe", + "number_groups_cohorts": 1, + }, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["design_allocation"] == "dfasdfasd" + assert response_data["study_type"] == "dffad" + assert response_data["design_intervention_model"] == "eredf" + assert response_data["design_intervention_model_description"] == "dfadf" + assert response_data["design_primary_purpose"] == "dfasder" + assert response_data["design_masking"] == "dfdasdf" + assert response_data["design_masking_description"] == "tewsfdasf" + assert response_data["design_who_masked_list"] == ["one", "two"] + assert response_data["phase_list"] == ["three", "four"] + assert response_data["enrollment_count"] == 3 + assert response_data["enrollment_type"] == "dfasdf" + assert response_data["number_arms"] == 2 + assert response_data["design_observational_model_list"] == ["yes", "dfasd"] + assert response_data["design_time_perspective_list"] == 
["uhh"] + assert response_data["bio_spec_retention"] == "dfasdf" + assert response_data["bio_spec_description"] == "dfasdf" + assert response_data["target_duration"] == "rewrwe" + assert response_data["number_groups_cohorts"] == 1 + + +# ------------------- ELIGIBILITY METADATA ------------------- # +def test_get_eligibility_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/eligibility' endpoint is requested (GET) + THEN check that the response is valid and retrieves the eligibility metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/eligibility") + assert response.status_code == 200 + + +def test_put_eligibility_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/eligibility' endpoint is requested (PUT) + THEN check that the response is valid and updates the eligibility metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/eligibility", + json={ + "gender": "nb", + "gender_based": "no", + "gender_description": "none", + "minimum_age_value": 18, + "maximum_age_value": 61, + "minimum_age_unit": "1", + "maximum_age_unit": "2", + "healthy_volunteers": "3", + "inclusion_criteria": ["test"], + "exclusion_criteria": ["test", "ttest"], + "study_population": "study_population", + "sampling_method": "test", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["gender"] == "nb" + assert response_data["gender_based"] == "no" + assert response_data["gender_description"] == "none" + assert response_data["minimum_age_value"] == 18 + assert response_data["maximum_age_value"] == 61 + assert response_data["minimum_age_unit"] == "1" + assert 
response_data["maximum_age_unit"] == "2" + assert response_data["healthy_volunteers"] == "3" + assert response_data["inclusion_criteria"] == ["test"] + assert response_data["exclusion_criteria"] == ["test", "ttest"] + assert response_data["study_population"] == "study_population" + assert response_data["sampling_method"] == "test" + + +# ------------------- IDENTIFICATION METADATA ------------------- # +def test_get_identification_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) + THEN check that the response is valid and retrieves the identification metadata + """ + # BUG: ENDPOINT NOT WORKING + # study_id = pytest.global_study_id["id"] # type: ignore + # response = _test_client.get(f"/study/{study_id}/metadata/identification") + # assert response.status_code == 200 + + +def test_post_identification_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (POST) + THEN check that the response is valid and creates the identification metadata + """ + # BUG: ENDPOINT NOT WORKING + # study_id = pytest.global_study_id["id"] # type: ignore + # response = _test_client.post( + # f"/study/{study_id}/metadata/identification", + # json={ + # "identifier": "identifier", + # "identifier_type": "identifier type", + # "identifier_value": "identifier value", + # "identifier_link": "identifier link", + # "secondary": "secondary" + # }, + # ) + # assert response.status_code == 200 + # response_data = json.loads(response.data) + # pytest.global_identification_id = response_data["id"] + + +def test_delete_identification_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) + THEN 
check that the response is valid and retrieves the identification metadata + """ + # BUG: ENDPOINT NOT WORKING + # study_id = pytest.global_study_id["id"] # type: ignore + # idenficiation_id = pytest.global_identification_id + # response = _test_client.delete(f"/study/{study_id}/metadata/identification/{identification_id}") # pylint: disable=line-too-long # noqa: E501 + # assert response.status_code == 200 + + +# ------------------- INTERVENTION METADATA ------------------- # +def test_get_intervention_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (GET) + THEN check that the response is valid and retrieves the intervention metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/intervention") + assert response.status_code == 200 + + +def test_post_intervention_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (POST) + THEN check that the response is valid and creates the intervention metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/intervention", + json=[ + { + "type": "intervention type", + "name": "intervention name", + "description": "intervention description", + "arm_group_label_list": ["arm group 1", "arm group 2"], + "other_name_list": ["other name 1", "other name 2"], + } + ], + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_intervention_id = response_data[0]["id"] + + assert response_data[0]["type"] == "intervention type" + assert response_data[0]["name"] == "intervention name" + assert response_data[0]["description"] == "intervention description" + assert 
response_data[0]["arm_group_label_list"] == ["arm group 1", "arm group 2"] + assert response_data[0]["other_name_list"] == ["other name 1", "other name 2"] + + +# ------------------- IPD SHARING METADATA ------------------- # +def test_get_ipdsharing_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/ipdsharing' endpoint is requested (GET) + THEN check that the response is valid and retrieves the ipdsharing metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/ipdsharing") + assert response.status_code == 200 + + +def test_put_ipdsharing_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/ipdsharing' endpoint is requested (PUT) + THEN check that the response is valid and updates the ipdsharing metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/ipdsharing", + json={ + "ipd_sharing": "ipd sharing", + "ipd_sharing_description": "sharing description", + "ipd_sharing_info_type_list": ["type1", "type2"], + "ipd_sharing_time_frame": "time frame", + "ipd_sharing_access_criteria": "access criteria", + "ipd_sharing_url": "sharing url", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["ipd_sharing"] == "ipd sharing" + assert response_data["ipd_sharing_description"] == "sharing description" + assert response_data["ipd_sharing_info_type_list"] == ["type1", "type2"] + assert response_data["ipd_sharing_time_frame"] == "time frame" + assert response_data["ipd_sharing_access_criteria"] == "access criteria" + assert response_data["ipd_sharing_url"] == "sharing url" + + +# ------------------- LINK METADATA ------------------- # +def test_get_link_metadata(_test_client, 
_login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/link' endpoint is requested (GET) + THEN check that the response is valid and retrieves the link metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/link") + assert response.status_code == 200 + + +def test_post_link_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/link' endpoint is requested (POST) + THEN check that the response is valid and creates the link metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/link", + json=[{"url": "url link", "title": "title link"}], + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_link_id = response_data[0]["id"] + + assert response_data[0]["url"] == "url link" + assert response_data[0]["title"] == "title link" + + +def test_delete_link_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID and link ID + WHEN the '/study/{study_id}/metadata/link/{link_id}' endpoint is requested (DELETE) + THEN check that the response is valid and deletes the link metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + link_id = pytest.global_link_id + response = _test_client.delete(f"/study/{study_id}/metadata/link/{link_id}") + assert response.status_code == 200 + + +# ------------------- LOCATION METADATA ------------------- # +def test_get_location_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/location' endpoint is requested (GET) + THEN check that the response is valid and retrieves the location metadata + """ + study_id = 
pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/location") + assert response.status_code == 200 + + +def test_post_location_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/location' endpoint is requested (POST) + THEN check that the response is valid and creates the location metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/location", + json=[ + { + "facility": "facility location", + "status": "status location", + "city": "city location", + "state": "California", + "zip": "zip location", + "country": "country location", + } + ], + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_location_id = response_data[0]["id"] + + assert response_data[0]["facility"] == "facility location" + assert response_data[0]["status"] == "status location" + assert response_data[0]["city"] == "city location" + assert response_data[0]["state"] == "California" + assert response_data[0]["zip"] == "zip location" + assert response_data[0]["country"] == "country location" + + +def test_delete_location_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID and location ID + WHEN the '/study/{study_id}/metadata/location/{location_id}' + endpoint is requested (DELETE) + THEN check that the response is valid and deletes the location metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + location_id = pytest.global_location_id + response = _test_client.delete(f"/study/{study_id}/metadata/location/{location_id}") + assert response.status_code == 200 + + +# ------------------- OTHER METADATA ------------------- # +def test_get_other_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + 
WHEN the '/study/{study_id}/metadata/other' endpoint is requested (GET) + THEN check that the response is valid and retrieves the other metadata + """ + # BUG: KEYWORDS RETURNS A STRING '[]' INSTEAD OF A LIST + # BUG: CONDITIONS RETURNS A STRING '[]' INSTEAD OF A LIST (CONDITIONS ENDPOINT IS CAUSING WRONG RESPONSE HERE) # pylint: disable=line-too-long # noqa: E501 + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/other") + assert response.status_code == 200 + + +def test_put_other_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/other' endpoint is requested (PUT) + THEN check that the response is valid and updates the other metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/other", + json={ + "oversight_has_dmc": False, + "conditions": ["TESTCONDITION"], + "keywords": ["TEST"], + "size": "0", + }, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["oversight_has_dmc"] is False + assert response_data["conditions"] == ["TESTCONDITION"] + assert response_data["keywords"] == ["TEST"] + assert response_data["size"] == 0 + + +# ------------------- OVERALL-OFFICIAL METADATA ------------------- # +def test_get_overall_official_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/overall-official' endpoint is requested (GET) + THEN check that the response is valid and retrieves the overall-official metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/overall-official") + assert response.status_code == 200 + + +def test_post_overall_official_metadata(_test_client, _login_user): + """ + Given a Flask 
application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/overall-official' endpoint is requested (POST) + THEN check that the response is valid and creates the overall-official metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/overall-official", + json=[ + { + "name": "official name", + "affiliation": "official affiliation", + "role": "official role", + } + ], + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_overall_official_id = response_data[0]["id"] + + assert response_data[0]["name"] == "official name" + assert response_data[0]["affiliation"] == "official affiliation" + assert response_data[0]["role"] == "official role" + + +def test_delete_overall_official_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a + study ID and overall official ID + WHEN the '/study/{study_id}/metadata/overall-official/{overall_official_id}' + endpoint is requested (DELETE) + THEN check that the response is valid and deletes the overall-official metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + overall_official_id = pytest.global_overall_official_id + response = _test_client.delete( + f"/study/{study_id}/metadata/overall-official/{overall_official_id}" + ) + assert response.status_code == 200 + + +# ------------------- OVERSIGHT METADATA ------------------- # +def test_get_oversight_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/oversight' endpoint is requested (GET) + THEN check that the response is valid and retrieves the oversight metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/oversight") + assert response.status_code == 200 + + +def 
test_put_oversight_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/oversight' endpoint is requested (PUT) + THEN check that the response is valid and updates the oversight metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": True} + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + print(response) + + assert response_data is True + + +# ------------------- REFERENCE METADATA ------------------- # +def test_get_reference_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/reference' endpoint is requested (GET) + THEN check that the response is valid and retrieves the reference metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/reference") + assert response.status_code == 200 + + +def test_post_reference_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/reference' endpoint is requested (POST) + THEN check that the response is valid and creates the reference metadata + """ + # BUG:? 
title key is not being returned in response (update: title isn't in the model) # pylint: disable=line-too-long # noqa: E501 + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/reference", + json=[ + { + "identifier": "reference identifier", + "type": "reference type", + "citation": "reference citation", + } + ], + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_reference_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "reference identifier" + assert response_data[0]["type"] == "reference type" + assert response_data[0]["citation"] == "reference citation" + + +def test_delete_reference_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and + a study ID and reference ID + WHEN the '/study/{study_id}/metadata/reference/{reference_id}' + endpoint is requested (DELETE) + THEN check that the response is valid and deletes the reference metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + reference_id = pytest.global_reference_id + response = _test_client.delete( + f"/study/{study_id}/metadata/reference/{reference_id}" + ) + assert response.status_code == 200 + + +# ------------------- SPONSORS METADATA ------------------- # +def test_get_sponsors_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (GET) + THEN check that the response is valid and retrieves the sponsors metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/sponsors") + assert response.status_code == 200 + + +def test_put_sponsors_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/sponsors' endpoint is 
requested (PUT) + THEN check that the response is valid and updates the sponsors metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/sponsors", + json={ + "responsible_party_type": "party type", + "responsible_party_investigator_name": "party name", + "responsible_party_investigator_title": "party title", + "responsible_party_investigator_affiliation": "party affiliation", + "lead_sponsor_name": "sponsor name", + }, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["responsible_party_type"] == "party type" + assert response_data["responsible_party_investigator_name"] == "party name" + assert response_data["responsible_party_investigator_title"] == "party title" + assert ( + response_data["responsible_party_investigator_affiliation"] + == "party affiliation" # noqa: W503 + ) + assert response_data["lead_sponsor_name"] == "sponsor name" + + +# ------------------- STATUS METADATA ------------------- # +def test_get_status_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/status' endpoint is requested (GET) + THEN check that the response is valid and retrieves the status metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/status") + assert response.status_code == 200 + + +def test_put_status_metadata(_test_client, _login_user): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/status' endpoint is requested (PUT) + THEN check that the response is valid and updates the status metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.put( + f"/study/{study_id}/metadata/status", + json={ + "overall_status": "in progress", + "why_stopped": "not stopped", + "start_date": 
"no start", + "start_date_type": "date type", + "completion_date": "no completion", + "completion_date_type": "date type", + }, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["overall_status"] == "in progress" + assert response_data["why_stopped"] == "not stopped" + assert response_data["start_date"] == "no start" + assert response_data["start_date_type"] == "date type" + assert response_data["completion_date"] == "no completion" + assert response_data["completion_date_type"] == "date type" diff --git a/tests/unit/test_study_models.py b/tests/unit/test_study_models.py index 40e32567..6d9b249e 100644 --- a/tests/unit/test_study_models.py +++ b/tests/unit/test_study_models.py @@ -1,4 +1,6 @@ """Tests for the Study model""" +import uuid + from model.study import Study @@ -11,28 +13,13 @@ def test_new_study(): study = Study.from_data( { "title": "Study1", - "description": "This is a test study", "image": "https://api.dicebear.com/6.x/adventurer/svg", - "size": "100 GB", - "keywords": ["test", "study"], "last_updated": "2021-01-01", - "owner": { - "affiliations": "affiliations1", - "email": "email1", - "first_name": "first_name1", - "last_name": "last_name1", - "orcid": "orcid1", - "roles": ["role1", "role2"], - "permission": "permission1", - "status": "status1", - }, } ) assert study.title == "Study1" - assert study.description == "This is a test study" + assert uuid.UUID(study.id) assert study.image == "https://api.dicebear.com/6.x/adventurer/svg" - assert study.size == "100 GB" - assert study.keywords == ["test", "study"] - assert study.owner.affiliations == "affiliations1" + # assert study.owner.affiliations == "affiliations1" From 0e7de9af198987dd55a5b437e8094134ebc46de9 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 13 Oct 2023 17:29:59 -0700 Subject: [PATCH 297/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20docker=20?= =?UTF-8?q?compose=20file?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- prod-docker-compose.yaml | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml index e0afcb93..16667e39 100644 --- a/prod-docker-compose.yaml +++ b/prod-docker-compose.yaml @@ -4,28 +4,15 @@ services: build: context: . dockerfile: Dockerfile - # image: fairhub-flask-api:local entrypoint: - flask - run - - --host=0.0.0.0 + - --host=0.0.0.0 - --port=5000 ports: - 5000:5000 environment: FLASK_DEBUG: 0 FLASK_APP: ./app.py - FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/fairhub}" -# database: -# image: postgres:15.3 -# environment: -# - POSTGRES_USER=POSTGRES_USER -# - POSTGRES_PASSWORD=POSTGRES_PASSWORD -# - POSTGRES_DB=POSTGRES_DB -# ports: -# - 5432:5432 -# restart: always -# volumes: -# - db-data:/var/lib/postgresql/data -# volumes: -# db-data: + FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL}" + FAIRHUB_SECRET: "${FAIRHUB_SECRET}" From 37dd3dcf9d4fd2920a204ca55bf3cc2c1a87e24a Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 11:05:41 -0700 Subject: [PATCH 298/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/app.py b/app.py index 6a921349..514cecbd 100644 --- a/app.py +++ b/app.py @@ -57,14 +57,12 @@ def create_app(config_module=None): api.init_app(app) bcrypt.init_app(app) - # Only allow CORS origin for localhost:3000 + # Only allow CORS origin for localhost:3000 and any subdomain of azurestaticapps.net/ CORS( app, resources={ "/*": { - "origins": [ - "http://localhost:3000", - ], + "origins": ["http://localhost:3000", "https://*.azurestaticapps.net"], } }, allow_headers=[ From 77aee77d60b2bbfe7a0efc86c5fb33194a42cc18 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 
2023 11:47:41 -0700 Subject: [PATCH 299/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/app.py b/app.py index 514cecbd..34d043c9 100644 --- a/app.py +++ b/app.py @@ -62,7 +62,10 @@ def create_app(config_module=None): app, resources={ "/*": { - "origins": ["http://localhost:3000", "https://*.azurestaticapps.net"], + "origins": [ + "http://localhost:3000", + "https://brave-ground-*.centralus.2.azurestaticapps.net", + ], } }, allow_headers=[ From 49bd33dfac4b8b9c472ba1a728e150ecc99d26ac Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 12:14:50 -0700 Subject: [PATCH 300/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 34d043c9..e5ff31f7 100644 --- a/app.py +++ b/app.py @@ -64,7 +64,7 @@ def create_app(config_module=None): "/*": { "origins": [ "http://localhost:3000", - "https://brave-ground-*.centralus.2.azurestaticapps.net", + "https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net/", ], } }, From eb85c2519846d6bc830ffe112c2654893056054b Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 12:16:45 -0700 Subject: [PATCH 301/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index e5ff31f7..584b264b 100644 --- a/app.py +++ b/app.py @@ -64,7 +64,7 @@ def create_app(config_module=None): "/*": { "origins": [ "http://localhost:3000", - "https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net/", + 
"https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net", ], } }, From 3089efd8ad541d1a92dceb87190ba07a992c4c56 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 13:01:54 -0700 Subject: [PATCH 302/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 584b264b..08c5d0e8 100644 --- a/app.py +++ b/app.py @@ -176,7 +176,7 @@ def on_after_request(resp): ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") - # resp.headers["Access-Control-Allow-Origin"] = "http://localhost:3000" + resp.headers["Access-Control-Allow-Origin"] = request.headers.get("Origin") # resp.headers["Access-Control-Allow-Credentials"] = "true" # resp.headers[ # "Access-Control-Allow-Headers" From 8d736b419988152dc6ee06afcbb6a22f9ba5b1c6 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 13:13:35 -0700 Subject: [PATCH 303/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/app.py b/app.py index 08c5d0e8..425bbc7f 100644 --- a/app.py +++ b/app.py @@ -57,15 +57,17 @@ def create_app(config_module=None): api.init_app(app) bcrypt.init_app(app) + allowed_origins = [ + "http://localhost:3000", + "https://brave-ground-*.centralus.2.azurestaticapps.net", + ] + # Only allow CORS origin for localhost:3000 and any subdomain of azurestaticapps.net/ CORS( app, resources={ "/*": { - "origins": [ - "http://localhost:3000", - "https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net", - ], + "origins": allowed_origins, } }, allow_headers=[ @@ -176,7 +178,7 @@ def on_after_request(resp): ) resp.set_cookie("token", new_token, 
secure=True, httponly=True, samesite="lax") - resp.headers["Access-Control-Allow-Origin"] = request.headers.get("Origin") + resp.headers["Access-Control-Allow-Origin"] = allowed_origins # resp.headers["Access-Control-Allow-Credentials"] = "true" # resp.headers[ # "Access-Control-Allow-Headers" From 1abaf663c0f351f281db8563abce4e58a68bbdca Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 13:22:21 -0700 Subject: [PATCH 304/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/app.py b/app.py index 425bbc7f..68ee05ae 100644 --- a/app.py +++ b/app.py @@ -59,7 +59,7 @@ def create_app(config_module=None): allowed_origins = [ "http://localhost:3000", - "https://brave-ground-*.centralus.2.azurestaticapps.net", + "https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net", ] # Only allow CORS origin for localhost:3000 and any subdomain of azurestaticapps.net/ @@ -67,7 +67,10 @@ def create_app(config_module=None): app, resources={ "/*": { - "origins": allowed_origins, + "origins": [ + "http://localhost:3000", + "https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net", + ], } }, allow_headers=[ @@ -178,7 +181,11 @@ def on_after_request(resp): ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") - resp.headers["Access-Control-Allow-Origin"] = allowed_origins + print("after request") + print(resp.headers) + print(request.headers.get("Origin")) + + resp.headers["Access-Control-Allow-Origin"] = request.headers.get("Origin") # resp.headers["Access-Control-Allow-Credentials"] = "true" # resp.headers[ # "Access-Control-Allow-Headers" From 3b64ba69f75c9e2ae3b896259aef072295c106f6 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 13:49:14 -0700 Subject: [PATCH 305/505] 
=?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/app.py b/app.py index 68ee05ae..9da35199 100644 --- a/app.py +++ b/app.py @@ -10,6 +10,7 @@ from flask_cors import CORS from sqlalchemy import MetaData +import logging import config import model from apis import api @@ -29,6 +30,9 @@ def create_app(config_module=None): app.config["SWAGGER_UI_DOC_EXPANSION"] = "none" app.config["RESTX_MASK_SWAGGER"] = False + # set up logging + logging.basicConfig(level=logging.DEBUG) + # Initialize config app.config.from_object(config_module or "config") @@ -181,10 +185,6 @@ def on_after_request(resp): ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") - print("after request") - print(resp.headers) - print(request.headers.get("Origin")) - resp.headers["Access-Control-Allow-Origin"] = request.headers.get("Origin") # resp.headers["Access-Control-Allow-Credentials"] = "true" # resp.headers[ From d6c7b96f28c294bac5c522274e76ca3dce0a6b40 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 13:52:08 -0700 Subject: [PATCH 306/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/app.py b/app.py index 9da35199..a73f715e 100644 --- a/app.py +++ b/app.py @@ -4,13 +4,13 @@ import os from datetime import timezone +import logging import jwt from flask import Flask, request from flask_bcrypt import Bcrypt from flask_cors import CORS from sqlalchemy import MetaData -import logging import config import model from apis import api @@ -185,6 +185,9 @@ def on_after_request(resp): ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") + print("after request") + 
print(request.headers.get("Origin")) + resp.headers["Access-Control-Allow-Origin"] = request.headers.get("Origin") # resp.headers["Access-Control-Allow-Credentials"] = "true" # resp.headers[ @@ -196,7 +199,7 @@ def on_after_request(resp): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" - # print(resp.headers) + print(resp.headers) return resp From 1b4e46fdac42200ea3a89fcec0c4a173906e1adc Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 18 Oct 2023 13:55:22 -0700 Subject: [PATCH 307/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20add=20cors=20route?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/app.py b/app.py index a73f715e..0d9e31af 100644 --- a/app.py +++ b/app.py @@ -185,8 +185,8 @@ def on_after_request(resp): ) resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") - print("after request") - print(request.headers.get("Origin")) + app.logger.info("after request") + app.logger.info(request.headers.get("Origin")) resp.headers["Access-Control-Allow-Origin"] = request.headers.get("Origin") # resp.headers["Access-Control-Allow-Credentials"] = "true" @@ -199,7 +199,7 @@ def on_after_request(resp): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" - print(resp.headers) + app.logger.info(resp.headers) return resp From 5386ce2c3f7ab64b28948cf4ca426c51d3678cbd Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Thu, 19 Oct 2023 10:37:15 -0700 Subject: [PATCH 308/505] feat: add jsonschema validation for backend (#15) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: validation schema for sign up endpoint * style: 🎨 fix code style issues with Black * feat: validate email with email_validator * feat: schema validation for login * 
feat: schema validation for user details PUT * feat: POST, PUT schemas added for /study endpoint * feat: schema validation added for arm metadata * style: 🎨 fix code style issues with Black * feat: schema validation for available ipd * style: 🎨 fix code style issues with Black * feat: study contact schema validation added * feat: schema validation for collaborators * style: 🎨 fix code style issues with Black * feat: conditions schema validation added" * style: 🎨 fix code style issues with Black * feat: send better error message for invalid email * feat: schema validation for study description * feat: schema validation for study design * feat: schema validation for study intervention * schema validation for ipd sharing * feat: schema validation for study link * fix: fixed import typo * feat: schema validation for other metadata * feat: schema validation for study location * feat: schema validation for oversight endpoint * feat: schema validation for study reference * feat: schema validation for sponsors collaborators * feat: schema validate password meets all criterias on signup * wip: additional requirements/checks for schema validations * style: 🎨 fix code style issues with Black * feat: study design carries additional validations * feat: study contact carries additional validations * feat: study eligibility carries additional validations * feat: study intervention carries additional validations * feat: overal official has enums and other validations * feat: enums and other validations for location endpoint * feat: enums and additional validations for ipd sharing * feat: additional validations for study_status * feat: additional validations for study reference * feat: additional validations for available ipds * feat: enums added and additional validations for sponsor collaborators * additional enums and validations for study links * refactor: removed marshal_with for validation errors * feat: additional validations added for study contact * feat: enums added for 
enrollment_type in design endpoint * refactor: other metadata validation formatted * feat: minimum length added for validating sponsor collaborators * fix: remove double imports * refactor: isort imports * style: 🎨 fix code style issues with Black * feat: intervention schema validation added * feat: conditional requirements added to status metadata * feat: conditional validations added for sponsor collaborators * refactor: endpoint comment added * feat: additional validations for study identification * style: 🎨 fix code style issues with Black * feat: conditional validations and enums added for final endpoints * style: 🎨 fix code style issues with Black * fix: re-add study type for conditions endpoint * fix: remove last marshal_with on put, fix type def for data (overall-official) * fix: update pytest fixtures for validation schemas * style: 🎨 fix code style issues with Black * fix: updated pytests for schema validations * style: 🎨 fix code style issues with Black * fix: fix: invalid characters for regex * fix: update flake8 errors * style: 🎨 fix code style issues with Black * fix: update for flake8 errors * style: 🎨 fix code style issues with Black * fix: updating for flake8 issues * style: 🎨 fix code style issues with Black * fix: correct issues for pyflake8 * style: 🎨 fix code style issues with Black * fix: type def for request.json * fix: update for pylint issue * 🐛 fix: update schemas --------- Co-authored-by: Lint Action Co-authored-by: Sanjay Soundarajan --- apis/authentication.py | 105 ++++++- apis/dataset.py | 2 +- apis/dataset_metadata/dataset_consent.py | 4 +- apis/dataset_metadata/dataset_date.py | 4 +- .../dataset_de_ident_level.py | 4 +- .../dataset_managing_organization.py | 4 +- apis/dataset_metadata/dataset_other.py | 4 +- apis/dataset_metadata/dataset_readme.py | 4 +- apis/dataset_metadata/dataset_record_keys.py | 4 +- apis/dataset_metadata/dataset_related_item.py | 2 +- apis/study.py | 36 +++ apis/study_metadata/study_arm.py | 38 +++ 
apis/study_metadata/study_available_ipd.py | 38 ++- apis/study_metadata/study_contact.py | 61 +++- apis/study_metadata/study_description.py | 18 ++ apis/study_metadata/study_design.py | 235 ++++++++++++++- apis/study_metadata/study_eligibility.py | 36 +++ apis/study_metadata/study_identification.py | 27 ++ apis/study_metadata/study_intervention.py | 50 ++++ apis/study_metadata/study_ipdsharing.py | 58 ++++ apis/study_metadata/study_link.py | 21 ++ apis/study_metadata/study_location.py | 36 +++ apis/study_metadata/study_other.py | 56 ++++ apis/study_metadata/study_overall_official.py | 29 ++ apis/study_metadata/study_reference.py | 22 ++ .../study_sponsors_collaborators.py | 81 ++++++ apis/study_metadata/study_status.py | 59 ++++ apis/user.py | 59 +++- poetry.lock | 234 ++++++++++++++- pyproject.toml | 1 + tests/conftest.py | 10 +- tests/functional/test_study_metadata_api.py | 269 +++++++++--------- 32 files changed, 1444 insertions(+), 167 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 719eb633..729e9a43 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -9,8 +9,10 @@ from typing import Any, Union import jwt +from email_validator import EmailNotValidError, validate_email from flask import g, make_response, request from flask_restx import Namespace, Resource, fields +from jsonschema import FormatChecker, ValidationError, validate import model @@ -46,10 +48,71 @@ class SignUpUser(Resource): def post(self): """signs up the new users and saves data in DB""" data: Union[Any, dict] = request.json - # TODO data[email doesnt exist then raise error; json validation library - pattern = r"^[\w\.-]+@[\w\.-]+\.\w+$" - if not data["email_address"] or not re.match(pattern, data["email_address"]): - return "Email address is invalid", 422 + + def validate_is_valid_email(instance): + # Turn on check_deliverability + # for first-time validations like on account creation pages (but not + # login pages). 
+ email_address = instance + try: + validate_email(email_address, check_deliverability=False) + return True + except EmailNotValidError as e: + raise ValidationError("Invalid email address format") from e + + def validate_password(instance): + password = instance + # Check if password is at least 8 characters long + if len(password) < 8: + raise ValidationError("Password must be at least 8 characters long") + + # Check if password contains at least one lowercase letter + if not re.search(r"[a-z]", password): + raise ValidationError( + "Password must contain at least one lowercase letter" + ) + + # Check if password contains at least one uppercase letter + if not re.search(r"[A-Z]", password): + raise ValidationError( + "Password must contain at least one uppercase letter" + ) + + # Check if password contains at least one digit + if not re.search(r"[0-9]", password): + raise ValidationError("Password must contain at least one digit") + + # Check if password contains at least one special character + if not re.search(r"[~`!@#$%^&*()_+\-={[}\]|:;\"'<,>.?/]", password): + raise ValidationError( + "Password must contain at least one special character" + ) + + return True + + # Schema validation + schema = { + "type": "object", + "required": ["email_address", "password"], + "additionalProperties": False, + "properties": { + "email_address": {"type": "string", "format": "valid_email"}, + "password": { + "type": "string", + "format": "password", + }, + }, + } + + format_checker = FormatChecker() + format_checker.checks("valid_email")(validate_is_valid_email) + format_checker.checks("password")(validate_password) + + try: + validate(instance=data, schema=schema, format_checker=format_checker) + except ValidationError as e: + return e.message, 400 + user = model.User.query.filter_by( email_address=data["email_address"] ).one_or_none() @@ -80,8 +143,40 @@ def post(self): email_address = data["email_address"] - user = 
model.User.query.filter_by(email_address=email_address).one_or_none() + def validate_is_valid_email(instance): + print("within is_valid_email") + email_address = instance + print(email_address) + try: + validate_email(email_address) + return True + except EmailNotValidError as e: + raise ValidationError("Invalid email address format") from e + + # Schema validation + schema = { + "type": "object", + "required": ["email_address", "password"], + "additionalProperties": False, + "properties": { + "email_address": { + "type": "string", + "format": "valid email", + "error_message": "Invalid email address", + }, + "password": {"type": "string", "minLength": 8}, + }, + } + + format_checker = FormatChecker() + format_checker.checks("valid email")(validate_is_valid_email) + + try: + validate(instance=data, schema=schema, format_checker=format_checker) + except ValidationError as e: + return e.message, 400 + user = model.User.query.filter_by(email_address=email_address).one_or_none() if not user: return "Invalid credentials", 401 diff --git a/apis/dataset.py b/apis/dataset.py index 5f71b0e0..ec2506d2 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -79,7 +79,7 @@ def put(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("update_dataset", study): return "Access denied, you can not modify", 403 - data = request.json + data: typing.Union[dict, typing.Any] = request.json data_obj = model.Dataset.query.get(dataset_id) data_obj.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index fbea4753..c6617c06 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,3 +1,5 @@ +import typing + from flask import request from flask_restx import Resource, fields @@ -31,7 +33,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_consent_] def put(self, study_id: int, 
dataset_id: int): - data = request.json + data: typing.Union[dict, typing.Any] = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_consent_ = dataset_.dataset_consent.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index ab2b9550..73ddff1c 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,3 +1,5 @@ +import typing + from flask import request from flask_restx import Resource, fields @@ -27,7 +29,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_date_] def put(self, study_id: int, dataset_id: int): - data = request.json + data: typing.Union[dict, typing.Any] = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_date_ = dataset_.dataset_date.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 3d38170f..082d73aa 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -1,3 +1,5 @@ +import typing + from flask import request from flask_restx import Resource, fields @@ -31,7 +33,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in de_ident_level_] def put(self, study_id: int, dataset_id: int): - data = request.json + data: typing.Union[dict, typing.Any] = request.json dataset_ = model.Dataset.query.get(dataset_id) de_ident_level_ = dataset_.dataset_de_ident_level.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 4a9f778b..475dbf38 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -1,3 +1,5 @@ +import typing + from flask import request from flask_restx import Resource, fields @@ -26,7 
+28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in managing_organization_] def put(self, study_id: int, dataset_id: int): - data = request.json + data: typing.Union[dict, typing.Any] = request.json dataset_ = model.Dataset.query.get(dataset_id) managing_organization_ = dataset_.dataset_managing_organization.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index c93ac970..7d9941bd 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,3 +1,5 @@ +import typing + from flask import request from flask_restx import Resource, fields @@ -30,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_other_] def put(self, study_id: int, dataset_id: int): - data = request.json + data: typing.Union[dict, typing.Any] = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index b53f43c7..75e82560 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -1,3 +1,5 @@ +import typing + from flask import request from flask_restx import Resource, fields @@ -22,7 +24,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_readme_] def put(self, study_id: int, dataset_id: int): - data = request.json + data: typing.Union[dict, typing.Any] = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_readme_ = dataset_.dataset_readme.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index a440111b..1ed41648 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -1,3 +1,5 @@ +import 
typing + from flask import request from flask_restx import Resource, fields @@ -26,7 +28,7 @@ def get(self, study_id: int, dataset_id: int): return [d.to_dict() for d in dataset_record_keys_] def put(self, study_id: int, dataset_id: int): - data = request.json + data: typing.Union[dict, typing.Any] = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_record_keys_ = dataset_.dataset_de_ident_level.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 93b03da8..647483d1 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -40,7 +40,7 @@ def post(self, study_id: int, dataset_id: int): ) class DatasetRelatedItemUpdate(Resource): def put(self, study_id: int, dataset_id: int, related_item_id: int): - data = request.json + data: Union[Any, dict] = request.json dataset_related_item_ = model.DatasetRelatedItem.query.get(related_item_id) dataset_related_item_.update(data) model.db.session.commit() diff --git a/apis/study.py b/apis/study.py index 2166d441..ce5fc4a9 100644 --- a/apis/study.py +++ b/apis/study.py @@ -2,6 +2,7 @@ from flask import g, request from flask_restx import Namespace, Resource, fields, reqparse +from jsonschema import ValidationError, validate import model @@ -55,7 +56,25 @@ def get(self): @api.response(200, "Success") @api.response(400, "Validation Error") def post(self): + """Create a new study""" + # Schema validation + schema = { + "type": "object", + "required": ["title", "image"], + "additionalProperties": False, + "properties": { + "title": {"type": "string", "minLength": 1}, + "image": {"type": "string", "minLength": 1}, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: Union[Any, dict] = request.json + add_study = model.Study.from_data(data) model.db.session.add(add_study) study_id = add_study.id @@ -81,6 
+100,23 @@ def get(self, study_id: int): @api.response(400, "Validation Error") @api.doc(description="Update a study's details") def put(self, study_id: int): + """Update a study""" + # Schema validation + schema = { + "type": "object", + "required": ["title", "image"], + "additionalProperties": False, + "properties": { + "title": {"type": "string", "minLength": 1}, + "image": {"type": "string", "minLength": 1}, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + update_study = model.Study.query.get(study_id) if not is_granted("update_study", update_study): return "Access denied, you can not modify", 403 diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 60a11fb0..fb88ae0e 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -46,6 +47,43 @@ def get(self, study_id): def post(self, study_id): """Create study arm metadata""" + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "label": {"type": "string", "minLength": 1}, + "type": { + "type": "string", + "enum": [ + "Experimental", + "Active Comparator", + "Placebo Comparator", + "Sham Comparator", + "No Intervention", + "Other", + ], + }, + "description": {"type": "string", "minLength": 1}, + "intervention_list": { + "type": "array", + "items": {"type": "string", "minLength": 1}, + "minItems": 1, + "uniqueItems": True, + }, + }, + "required": ["label", "type", "description", "intervention_list"], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + study: model.Study = model.Study.query.get(study_id) if not is_granted("study_metadata", 
study): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index abc590fb..fea92106 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -46,10 +47,45 @@ def get(self, study_id: int): ) @api.response(200, "Success") @api.response(400, "Validation Error") + # @api.marshal_with(study_available) + # marshal with will need to be removed to have validation errors return @api.expect(study_available) - @api.marshal_with(study_available) def post(self, study_id: int): """Create study available metadata""" + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "identifier": {"type": "string", "minLength": 1}, + "type": { + "type": "string", + "enum": [ + "Individual Participant Data Set", + "Study Protocol", + "Statistical Analysis Plan", + "Informated Consent Form", + "Clinical Study Report", + "Analytic Code", + "Other", + ], + }, + "comment": {"type": "string", "minLength": 1}, + "url": {"type": "string", "format": "uri", "minLength": 1}, + }, + "required": ["identifier", "type", "url"], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + print(e.message) + return e.message, 400 + study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 552b09f4..199d1d93 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -1,8 +1,10 @@ """API routes for study contact metadata""" import 
typing +from email_validator import EmailNotValidError, validate_email from flask import request from flask_restx import Resource, fields +from jsonschema import FormatChecker, ValidationError, validate import model from apis.study_metadata_namespace import api @@ -44,6 +46,63 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study contact metadata""" + + def validate_is_valid_email(instance): + print("within is_valid_email") + email_address = instance + print(email_address) + try: + validate_email(email_address) + return True + except EmailNotValidError as e: + raise ValidationError("Invalid email address format") from e + + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "required": [ + "name", + "affiliation", + "phone", + "phone_ext", + "email_address", + ], + "properties": { + "name": {"type": "string", "minLength": 1}, + "affiliation": {"type": "string", "minLength": 1}, + "role": {"type": "string", "minLength": 1}, + "phone": { + "type": "string", + "minLength": 1, + "maxLength": 30, + "pattern": "^[0-9-]+$", + }, + "phone_ext": { + "type": "string", + "minLength": 1, + "pattern": "^[0-9-]+$", + "errorMessage": "Invalid phone extension", + }, + "email_address": {"type": "string", "format": "email"}, + "central_contact": {"type": "boolean"}, + }, + }, + "uniqueItems": True, + } + + format_checker = FormatChecker() + format_checker.checks("email")(validate_is_valid_email) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + return e.message, 400 + study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 @@ -72,10 +131,10 @@ class StudyContactUpdate(Resource): """Study Contact Metadata""" def delete(self, study_id: int, central_contact_id: int): + """Delete study contact metadata""" study = 
model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 - """Delete study contact metadata""" study_contact_ = model.StudyContact.query.get(central_contact_id) model.db.session.delete(study_contact_) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 47009f79..f0ab2f33 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -1,6 +1,7 @@ """API routes for study description metadata""" from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -35,6 +36,23 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study description metadata""" + study_obj = model.Study.query.get(study_id) + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "brief_summary": {"type": "string", "minLength": 1}, + "detailed_description": {"type": "string", "minLength": 1}, + }, + "required": ["brief_summary", "detailed_description"], + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index fe3fb0f9..fe7b84a0 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -1,6 +1,9 @@ """API routes for study design metadata""" +import typing + from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -50,13 +53,237 @@ def get(self, study_id: int): return study_design_.to_dict() def put(self, study_id: 
int): - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 """Update study design metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": ["study_type"], + "properties": { + "design_allocation": {"type": "string", "minLength": 1}, + "study_type": { + "type": "string", + "oneOf": [ + { + "enum": [ + "Interventional", + "Observational", + "Observational Patient Registry", + "Expanded Access", + ] + } + ], + }, + "design_intervention_model": { + "type": "string", + "oneOf": [ + { + "enum": [ + "Treatment", + "Prevention", + "Diagnostic", + "Supportive Care", + "Screening", + "Health Services Research", + "Basic Science", + "Device Feasibility", + ] + } + ], + }, + "design_intervention_model_description": { + "type": "string", + "minLength": 1, + }, + "design_primary_purpose": { + "type": "string", + "oneOf": [ + { + "enum": [ + "Single Group Assignment", + "Parallel Assignment", + "Crossover Assignment", + "Factorial Assignment", + "Sequential Assignment", + ] + } + ], + }, + "design_masking": { + "type": "string", + "oneOf": [ + { + "enum": [ + "None (open label)", + "Blinded (no details)" "Single", + "Double", + "Triple", + "Quadruple", + "N/A", + ] + } + ], + }, + "design_masking_description": {"type": "string", "minLength": 1}, + "design_who_masked_list": { + "type": "array", + "items": { + "type": "string", + "oneOf": [ + { + "enum": [ + "Participant", + "Care Provider", + "Investigator", + "Outcomes Assessor", + ] + }, + ], + }, + "minItems": 1, + "uniqueItems": True, + }, + "phase_list": { + "type": "array", + "items": { + "type": "string", + "oneOf": [ + { + "enum": [ + "N/A", + "Early Phase 1", + "Phase 1", + "Phase 1/2", + "Phase 2", + "Phase 2/3", + "Phase 3", + "Phase 4", + ] + } + ], + }, + "minItems": 1, + "uniqueItems": True, + }, + "enrollment_count": {"type": "integer"}, + "enrollment_type": { + "type": 
"string", + "enum": ["Actual", "Anticipated"], + }, + "number_arms": {"type": "integer"}, + "design_observational_model_list": { + "type": "array", + "items": { + "type": "string", + "oneOf": [ + { + "enum": [ + "Cohort", + "Case-Control", + "Case-Only", + "Case-Crossover", + "Ecologic or Community Study", + "Family-Based", + "Other", + ] + } + ], + }, + "minItems": 1, + "uniqueItems": True, + }, + "design_time_perspective_list": { + "type": "array", + "items": { + "type": "string", + "oneOf": [ + { + "enum": [ + "Retrospective", + "Prospective", + "Cross-sectional", + "Other", + ] + } + ], + }, + "minItems": 1, + "uniqueItems": True, + }, + "bio_spec_retention": { + "type": "string", + "oneOf": [ + { + "enum": [ + "None Retained", + "Samples With DNA", + "Samples Without DNA", + ] + } + ], + }, + "bio_spec_description": {"type": "string", "minLength": 1}, + "target_duration": {"type": "string", "minLength": 1}, + "number_groups_cohorts": {"type": "integer"}, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + # If schema validation passes, check other cases of validation + data: typing.Union[dict, typing.Any] = request.json + if data["study_type"] == "Interventional": + required_fields = [ + "design_allocation", + "design_intervention_model", + "design_primary_purpose", + "design_masking", + "design_who_masked_list", + "phase_list", + "enrollment_count", + "enrollment_type", + "number_arms", + ] + + for field in required_fields: + if field not in data: + return ( + ValidationError( + f"Field {field} is required for interventional studies" + ), + 400, + ) + + if data["study_type"] == "Observational": + required_fields = [ + "design_observational_model_list", + "design_time_perspective_list", + "bio_spec_retention", + "bio_spec_description", + "enrollment_count", + "enrollment_type", + "target_duration", + "number_groups_cohorts", + ] + + for field in required_fields: + if field not in data: + return ( + 
ValidationError( + f"Field {field} is required for observational studies" + ), + 400, + ) + study_ = model.Study.query.get(study_id) + # Check user permissions + if not is_granted("study_metadata", study_): + return "Access denied, you can not delete study", 403 - study_.study_design.update(request.json) + study_.study_design.update(data) model.db.session.commit() diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index e8d03b3e..7cafba9c 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -1,6 +1,7 @@ """API routes for study eligibility metadata""" from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -45,7 +46,42 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study eligibility metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "gender", + "gender_based", + "minimum_age_value", + "maximum_age_value", + ], + "properties": { + "gender": {"type": "string", "enum": ["All", "Female", "Male"]}, + "gender_based": {"type": "string", "enum": ["Yes", "No"]}, + "gender_description": {"type": "string"}, + "minimum_age_value": {"type": "integer"}, + "maximum_age_value": {"type": "integer"}, + "minimum_age_unit": {"type": "string", "minLength": 1}, + "maximum_age_unit": {"type": "string", "minLength": 1}, + "healthy_volunteers": {"type": "string", "enum": ["Yes", "No"]}, + "inclusion_criteria": {"type": "array", "items": {"type": "string"}}, + "exclusion_criteria": {"type": "array", "items": {"type": "string"}}, + "study_population": {"type": "string"}, + "sampling_method": { + "type": "string", + "enum": ["Non-Probability Sample", "Probability Sample"], + }, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + 
study_ = model.Study.query.get(study_id) + # Check user permissions if not is_granted("study_metadata", study_): return "Access denied, you can not delete study", 403 study_.study_eligibility.update(request.json) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index f04fd167..4a44bd1c 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -43,6 +44,32 @@ def get(self, study_id: int): @api.expect(study_identification) def post(self, study_id: int): """Create study identification metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "primary": { + "type": "object", + "additionalProperties": False, + "properties": { + "identifier": {"type": "string", "minLength": 1}, + "identifier_type": {"type": "string", "minLength": 1}, + "identifier_domain": {"type": "string", "minLength": 1}, + "identifier_link": {"type": "string", "minLength": 1}, + }, + }, + "secondary": { + "type": "array", + }, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index dc7bd8a9..3bf88d99 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -45,6 +46,55 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study intervention 
metadata""" + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "type": { + "type": "string", + "enum": [ + "Drug", + "Device", + "Biological/Vaccine", + "Procedure/Surgery", + "Radiation", + "Behavioral", + "Behavioral", + "Genetic", + "Dietary Supplement", + "Combination Product", + "Diagnostic Test", + "Other", + ], + }, + "name": {"type": "string", "minLength": 1}, + "description": {"type": "string", "minLength": 1}, + "arm_group_label_list": { + "type": "array", + "items": {"type": "string", "minLength": 1}, + "minItems": 1, + "uniqueItems": True, + }, + "other_name_list": { + "type": "array", + "items": {"type": "string", "minLength": 1}, + "minItems": 1, + "uniqueItems": True, + }, + }, + "required": ["name", "type", "arm_group_label_list"], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return {"message": e.message}, 400 + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 5135c905..f82b6a75 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -1,6 +1,9 @@ """API routes for study ipdsharing metadata""" +import typing + from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -38,6 +41,61 @@ def get(self, study_id: int): def put(self, study_id: int): """Create study ipdsharing metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "ipd_sharing": {"type": "string", "enum": ["Yes", "No", "Undecided"]}, + "ipd_sharing_description": {"type": "string", "minLength": 1}, + 
"ipd_sharing_info_type_list": { + "type": "array", + "items": { + "type": "string", + "enum": [ + "Study Protocol", + "Statistical Analysis Plan (SAP)", + "Informed Consent Form (ICF)", + "Clinical Study Report (CSR)", + "Analytical Code", + ], + }, + "minItems": 1, + "uniqueItems": True, + }, + "ipd_sharing_time_frame": {"type": "string", "minLength": 1}, + "ipd_sharing_access_criteria": {"type": "string", "minLength": 1}, + "ipd_sharing_url": {"type": "string", "format": "uri", "minLength": 1}, + }, + "required": [ + "ipd_sharing", + "ipd_sharing_description", + "ipd_sharing_info_type_list", + "ipd_sharing_time_frame", + "ipd_sharing_access_criteria", + "ipd_sharing_url", + ], + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[dict, typing.Any] = request.json + if data["ipd_sharing"] == "Yes": + required_fields = [ + "ipd_sharing_description", + "ipd_sharing_info_type_list", + "ipd_sharing_time_frame", + "ipd_sharing_access_criteria", + "ipd_sharing_url", + ] + + for field in required_fields: + if field not in data: + return f"Field {field} is required", 400 + study_ = model.Study.query.get(study_id) if not is_granted("study_metadata", study_): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index bd65715e..216632ad 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -37,6 +38,26 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study link metadata""" + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "url": {"type": "string", "format": "uri"}, + "title": {"type": 
"string", "minLength": 1}, + }, + "required": ["url", "title"], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 1b73f275..771bf381 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -44,6 +45,41 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study location metadata""" + # Schema validation + schema = { + "type": "array", + "additionalProperties": False, + "items": { + "type": "object", + "properties": { + "facility": {"type": "string", "minLength": 1}, + "status": { + "type": "string", + "enum": [ + "Withdrawn", + "Recruiting", + "Active, not recruiting", + "Not yet recruiting", + "Suspended", + "Enrolling by invitation", + "Completed", + "Terminated", + ], + }, + "city": {"type": "string", "minLength": 1}, + "state": {"type": "string", "minLength": 1}, + "zip": {"type": "string", "minLength": 1}, + "country": {"type": "string", "minLength": 1}, + }, + "required": ["facility", "status", "city", "country"], + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 730963d7..9030641c 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -3,6 +3,7 @@ from flask import 
request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -40,6 +41,34 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study other metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "oversight_has_dmc": {"type": "boolean"}, + "conditions": { + "type": "array", + "items": {"type": "string"}, + "minItems": 1, + "uniqueItems": True, + }, + "keywords": { + "type": "array", + "items": {"type": "string"}, + "minItems": 1, + "uniqueItems": True, + }, + "size": {"type": "integer"}, + }, + "required": ["oversight_has_dmc", "conditions", "keywords", "size"], + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + study_ = model.Study.query.get(study_id) study_.study_other.update(request.json) @@ -67,6 +96,19 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study oversight metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "properties": {"oversight_has_dmc": {"type": "boolean"}}, + "required": ["oversight_has_dmc"], + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 @@ -99,6 +141,20 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study conditions metadata""" + # Schema validation + schema = { + "type": "array", + "items": {"type": "string", "minLength": 1}, + "minItems": 1, + "uniqueItems": True, + "additionalItems": False, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not 
is_granted("study_metadata", study_obj): diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 31ff7872..836145cb 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -47,6 +48,34 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def post(self, study_id: int): """Create study overall official metadata""" + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": {"type": "string", "minLength": 1}, + "affiliation": {"type": "string", "minLength": 1}, + "role": { + "type": "string", + "enum": [ + "Study Chair", + "Study Director", + "Study Principal Investigator", + ], + }, + }, + "required": ["name", "affiliation", "role"], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index c3362dbd..e05f0364 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -42,6 +43,27 @@ def get(self, study_id: int): def post(self, study_id: int): """Create study reference metadata""" + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + 
"identifier": {"type": "string", "minLength": 1}, + "type": {"type": "string", "enum": ["Yes", "No"]}, + "citation": {"type": "string", "minLength": 1}, + }, + "required": ["citation", "identifier", "type"], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return {"message": e.message}, 400 + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 2a348599..b6f59b9d 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -3,6 +3,7 @@ from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api @@ -48,6 +49,75 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study sponsors metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "responsible_party_type", + "lead_sponsor_name", + "responsible_party_investigator_name", + "responsible_party_investigator_title", + "responsible_party_investigator_affiliation", + ], + "properties": { + "responsible_party_type": { + "type": "string", + "minLength": 1, + "enum": [ + "Sponsor", + "Principal Investigator", + "Sponsor-Investigator", + ], + }, + "responsible_party_investigator_name": { + "type": "string", + "minLength": 1, + }, + "responsible_party_investigator_title": { + "type": "string", + "minLength": 1, + }, + "responsible_party_investigator_affiliation": { + "type": "string", + "minLength": 1, + }, + "lead_sponsor_name": {"type": "string", "minLength": 1}, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[dict, 
typing.Any] = request.json + if data["responsible_party_type"] in [ + "Principal Investigator", + "Sponsor-Investigator", + ]: + if not data["responsible_party_investigator_name"]: + return ("Principal Investigator name is required", 400) + + if not data["responsible_party_investigator_title"]: + return ("Principal Investigator title is required", 400) + + if not data["responsible_party_investigator_affiliation"]: + return ("Principal Investigator affiliation is required", 400) + + investigator_name = data["responsible_party_investigator_name"] + investigator_title = data["responsible_party_investigator_title"] + investigator_affiliation = data[ + "responsible_party_investigator_affiliation" + ] + + if investigator_name == "": + return ("Principal Investigator name cannot be empty", 400) + if investigator_title == "": + return ("Principal Investigator title cannot be empty", 400) + if investigator_affiliation == "": + return ("Principal Investigator affiliation cannot be empty", 400) + study_ = model.Study.query.get(study_id) study_.study_sponsors_collaborators.update(request.json) @@ -77,6 +147,17 @@ def get(self, study_id: int): @api.response(400, "Validation Error") def put(self, study_id: int): """updating study collaborators""" + # Schema validation + schema = { + "type": "array", + "items": {"type": "string", "minLength": 1}, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index ef0df660..3ae9439b 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -1,6 +1,9 @@ """API routes for study status metadata""" +import typing + from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate 
import model from apis.study_metadata_namespace import api @@ -40,6 +43,62 @@ def get(self, study_id: int): def put(self, study_id: int): """Update study status metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "start_date", + "start_date_type", + "overall_status", + "why_stopped", + "completion_date", + "completion_date_type", + ], + "properties": { + "overall_status": { + "type": "string", + "minLength": 1, + "enum": [ + "Withdrawn", + "Recruiting", + "Active, not recruiting", + "Not yet recruiting", + "Suspended", + "Enrolling by invitation", + "Terminated", + "Completed", + ], + }, + "why_stopped": {"type": "string", "minLength": 1}, + "start_date": {"type": "string", "minLength": 1}, + "start_date_type": { + "type": "string", + "minLength": 1, + "enum": ["Actual", "Anticipated"], + }, + "completion_date": {"type": "string", "minLength": 1}, + "completion_date_type": { + "type": "string", + "minLength": 1, + "enum": ["Actual", "Anticipated"], + }, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[typing.Any, dict] = request.json + if data["overall_status"] in ["Completed", "Terminated", "Suspended"]: + if "why_stopped" not in data or not data["why_stopped"]: + return ( + f"why_stopped is required for overall_status: {data['overall_status']}", + 400, + ) + study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not delete study", 403 diff --git a/apis/user.py b/apis/user.py index 3084bdda..dd0c1401 100644 --- a/apis/user.py +++ b/apis/user.py @@ -1,7 +1,9 @@ from typing import Any, Union +from email_validator import EmailNotValidError, validate_email from flask import g, request from flask_restx import Namespace, Resource, fields +from jsonschema import FormatChecker, ValidationError, validate import model @@ -42,13 +44,62 @@ def get(self): return 
user_information @api.expect(study_model) - @api.marshal_with(study_model) + # @api.marshal_with(study_model) def put(self): """Updates user details""" - data: Union[Any, dict] = request.json - if data is None: - return {"message": "No data provided"}, 400 + def validate_is_valid_email(instance): + print("within is_valid_email") + email_address = instance + print(email_address) + try: + validate_email(email_address) + return True + except EmailNotValidError as e: + raise ValidationError("Invalid email address format") from e + + # Schema validation + # (profile_image is optional but additional properties are not allowed) + schema = { + "type": "object", + "required": [ + "email_address", + "username", + "first_name", + "last_name", + "institution", + "orcid", + "location", + "timezone", + ], + "additionalProperties": False, + "properties": { + "email_address": {"type": "string", "format": "valid email"}, + "username": {"type": "string", "minLength": 1}, + "first_name": {"type": "string", "minLength": 1}, + "last_name": {"type": "string", "minLength": 1}, + "institution": {"type": "string", "minLength": 1}, + "orcid": {"type": "string", "minLength": 1}, + "location": {"type": "string", "minLength": 1}, + "timezone": {"type": "string", "minLength": 1}, + "profile_image": {"type": "string", "minLength": 1}, # optional + }, + } + + format_checker = FormatChecker() + format_checker.checks("valid email")(validate_is_valid_email) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + # Verify if the user_information being sent + # back is okay for this 400 error, e.message is + # not being sent back + return e.message, 400 + + data: Union[Any, dict] = request.json user = model.User.query.get(g.user.id) # user.update(data) # don't update the username and email_address for now user_details = user.user_details diff --git a/poetry.lock b/poetry.lock index 273af104..5b50c575 100644 --- a/poetry.lock +++ 
b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "aniso8601" version = "9.0.1" description = "A library for parsing ISO 8601 strings." +category = "main" optional = false python-versions = "*" files = [ @@ -18,6 +19,7 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -39,6 +41,7 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" +category = "dev" optional = false python-versions = "*" files = [ @@ -50,6 +53,7 @@ files = [ name = "argon2-cffi" version = "21.3.0" description = "The secure Argon2 password hashing algorithm." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -69,6 +73,7 @@ tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -106,6 +111,7 @@ tests = ["pytest"] name = "arrow" version = "1.2.3" description = "Better dates & times for Python" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -120,6 +126,7 @@ python-dateutil = ">=2.7.0" name = "art" version = "6.0" description = "ASCII Art Library For Python" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -134,6 +141,7 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture name = "astroid" version = "2.15.6" description = "An abstract syntax tree for Python with inference support." 
+category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -153,6 +161,7 @@ wrapt = [ name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" +category = "dev" optional = false python-versions = "*" files = [ @@ -170,6 +179,7 @@ test = ["astroid", "pytest"] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -184,6 +194,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -202,6 +213,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -216,6 +228,7 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" +category = "dev" optional = false python-versions = "*" files = [ @@ -227,6 +240,7 @@ files = [ name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -261,6 +275,7 @@ typecheck = ["mypy"] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" +category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -279,6 +294,7 @@ lxml = ["lxml"] name = "black" version = "23.7.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -325,6 +341,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -343,6 +360,7 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "blinker" version = "1.6.2" description = "Fast, simple object-to-object and broadcast signaling" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -354,6 +372,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -365,6 +384,7 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." +category = "dev" optional = false python-versions = "*" files = [ @@ -441,6 +461,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -525,6 +546,7 @@ files = [ name = "click" version = "8.1.6" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -539,6 +561,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -550,6 +573,7 @@ files = [ name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -569,6 +593,7 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -644,6 +669,7 @@ toml = ["tomli"] name = "coveragespace" version = "6.0.2" description = "A place to track your code coverage metrics." +category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -662,6 +688,7 @@ requests = ">=2.28,<3.0" name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -689,6 +716,7 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -700,6 +728,7 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -711,6 +740,7 @@ files = [ name = "dicttoxml" version = "1.7.16" description = "Converts a Python dictionary or other native data type into a valid XML string." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -722,6 +752,7 @@ files = [ name = "dill" version = "0.3.7" description = "serialize all of Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -732,20 +763,58 @@ files = [ [package.extras] graph = ["objgraph (>=1.7.2)"] +[[package]] +name = "dnspython" +version = "2.4.2" +description = "DNS toolkit" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, + {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, +] + +[package.extras] +dnssec = ["cryptography (>=2.6,<42.0)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] +doq = ["aioquic (>=0.9.20)"] +idna = ["idna (>=2.1,<4.0)"] +trio = ["trio (>=0.14,<0.23)"] +wmi = ["wmi (>=1.5.1,<2.0.0)"] + [[package]] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" +category = "dev" optional = false python-versions = "*" files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] +[[package]] +name = "email-validator" +version = "2.0.0.post2" +description = "A robust email address syntax and deliverability validation library." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "email_validator-2.0.0.post2-py3-none-any.whl", hash = "sha256:2466ba57cda361fb7309fd3d5a225723c788ca4bbad32a0ebd5373b99730285c"}, + {file = "email_validator-2.0.0.post2.tar.gz", hash = "sha256:1ff6e86044200c56ae23595695c54e9614f4a9551e0e393614f764860b3d7900"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + [[package]] name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -760,6 +829,7 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" +category = "dev" optional = false python-versions = "*" files = [ @@ -774,6 +844,7 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "faker" version = "18.13.0" description = "Faker is a Python package that generates fake data for you." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -788,6 +859,7 @@ python-dateutil = ">=2.4" name = "fastjsonschema" version = "2.18.0" description = "Fastest Python implementation of JSON schema" +category = "dev" optional = false python-versions = "*" files = [ @@ -802,6 +874,7 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -818,6 +891,7 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.2" description = "A simple framework for building complex web applications." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -841,6 +915,7 @@ dotenv = ["python-dotenv"] name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." 
+category = "main" optional = false python-versions = "*" files = [ @@ -856,6 +931,7 @@ Flask = "*" name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" +category = "main" optional = false python-versions = "*" files = [ @@ -870,6 +946,7 @@ Flask = ">=0.9" name = "flask-restx" version = "1.1.0" description = "Fully featured framework for fast, easy and documented API development with Flask" +category = "main" optional = false python-versions = "*" files = [ @@ -893,6 +970,7 @@ test = ["Faker (==2.0.0)", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pyt name = "flask-sqlalchemy" version = "3.0.5" description = "Add SQLAlchemy support to your Flask application." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -908,6 +986,7 @@ sqlalchemy = ">=1.4.18" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -919,6 +998,7 @@ files = [ name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -933,6 +1013,7 @@ python-dateutil = ">=2.7" name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -950,6 +1031,7 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -958,6 +1040,7 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -966,6 +1049,7 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -995,6 +1079,7 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -1003,6 +1088,7 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", 
hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -1023,6 +1109,7 @@ test = ["objgraph", "psutil"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1034,6 +1121,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1053,6 +1141,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.0.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1071,6 +1160,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1082,6 +1172,7 @@ files = [ name = "ipykernel" version = "6.25.0" description = "IPython Kernel for Jupyter" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1095,7 +1186,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = 
">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1115,6 +1206,7 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.12.2" description = "IPython: Productive Interactive Computing" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1154,6 +1246,7 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" +category = "dev" optional = false python-versions = "*" files = [ @@ -1165,6 +1258,7 @@ files = [ name = "ipywidgets" version = "8.1.0" description = "Jupyter interactive widgets" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1186,6 +1280,7 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1200,6 +1295,7 @@ arrow = ">=0.15.0" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1217,6 +1313,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1228,6 +1325,7 @@ files = [ name = "jedi" version = "0.19.0" description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1247,6 +1345,7 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1264,6 +1363,7 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." +category = "dev" optional = false python-versions = "*" files = [ @@ -1278,6 +1378,7 @@ dev = ["hypothesis"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1289,6 +1390,7 @@ files = [ name = "jsonschema" version = "4.18.4" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1320,6 +1422,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1335,6 +1438,7 @@ referencing = ">=0.28.0" name = "jupyter" version = "1.0.0" description = "Jupyter metapackage. Install all the Jupyter components in one go." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1355,6 +1459,7 @@ qtconsole = "*" name = "jupyter-client" version = "8.3.0" description = "Jupyter protocol implementation and client libraries" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1364,7 +1469,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1378,6 +1483,7 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-console" version = "6.6.3" description = "Jupyter terminal console" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1389,7 +1495,7 @@ files = [ ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" prompt-toolkit = ">=3.0.30" pygments = "*" pyzmq = ">=17" @@ -1402,6 +1508,7 @@ test = ["flaky", "pexpect", "pytest"] name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1422,6 +1529,7 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "jupyter-events" version = "0.7.0" description = "Jupyter Event System library" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1447,6 +1555,7 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p name = "jupyter-lsp" version = "2.2.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1462,6 +1571,7 @@ jupyter-server = ">=1.1.2" name = "jupyter-server" version = "2.7.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1474,7 +1584,7 @@ anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" jupyter-events = ">=0.6.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1498,6 +1608,7 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-terminals" version = "0.4.4" description = "A Jupyter Server Extension Providing Terminals." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1517,6 +1628,7 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", name = "jupyterlab" version = "4.0.3" description = "JupyterLab computational environment" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1550,6 +1662,7 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1561,6 +1674,7 @@ files = [ name = "jupyterlab-server" version = "2.24.0" description = "A set of server components for JupyterLab and JupyterLab like applications." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1587,6 +1701,7 @@ test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-valida name = "jupyterlab-widgets" version = "3.0.8" description = "Jupyter interactive widgets for JupyterLab" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1598,6 +1713,7 @@ files = [ name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1643,6 +1759,7 @@ files = [ name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1660,6 +1777,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1683,6 +1801,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1719,6 +1847,7 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1733,6 +1862,7 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1744,6 +1874,7 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1755,6 +1886,7 @@ files = [ name = "minilog" version = "2.2" description = "Minimalistic wrapper for Python logging." +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1766,6 +1898,7 @@ files = [ name = "mistune" version = "3.0.1" description = "A sane and fast Markdown parser with useful plugins and renderers" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1777,6 +1910,7 @@ files = [ name = "mkdocs" version = "1.3.1" description = "Project documentation with Markdown." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1803,6 +1937,7 @@ i18n = ["babel (>=2.9.0)"] name = "mypy" version = "1.4.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1849,6 +1984,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1860,6 +1996,7 @@ files = [ name = "nbclient" version = "0.8.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1869,7 +2006,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -1882,6 +2019,7 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= name = "nbconvert" version = "7.7.3" description = "Converting Jupyter Notebooks" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1920,6 +2058,7 @@ webpdf = ["playwright"] name = "nbformat" version = "5.9.2" description = "The Jupyter Notebook format" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1941,6 +2080,7 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.7" description = "Patch asyncio to allow nested event loops" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1952,6 +2092,7 @@ files = [ name = "notebook" version = "7.0.1" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1976,6 +2117,7 @@ test = ["ipykernel", "jupyter-server[test] (>=2.4.0,<3)", 
"jupyterlab-server[tes name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1993,6 +2135,7 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2004,6 +2147,7 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2015,6 +2159,7 @@ files = [ name = "pandocfilters" version = "1.5.0" description = "Utilities for writing pandoc filters in python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2026,6 +2171,7 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2041,6 +2187,7 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2052,6 +2199,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2063,6 +2211,7 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -2077,6 +2226,7 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" optional = false python-versions = "*" files = [ @@ -2088,6 +2238,7 @@ files = [ name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2099,6 +2250,7 @@ files = [ name = "platformdirs" version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2114,6 +2266,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2129,6 +2282,7 @@ testing = ["pytest", "pytest-benchmark"] name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2147,6 +2301,7 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2161,6 +2316,7 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2175,6 +2331,7 @@ wcwidth = "*" name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2201,6 +2358,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2223,6 +2381,7 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -2234,6 +2393,7 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "dev" optional = false python-versions = "*" files = [ @@ -2248,6 +2408,7 @@ tests = ["pytest"] name = "pycodestyle" version = "2.11.0" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2259,6 +2420,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2270,6 +2432,7 @@ files = [ name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2287,6 +2450,7 @@ toml = ["tomli (>=1.2.3)"] name = "pyfairdatatools" version = "0.1.3" description = "Tools for AI-READI" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2309,6 +2473,7 @@ validators = ">=0.20.0,<0.21.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2320,6 +2485,7 @@ files = [ name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2334,6 +2500,7 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2351,6 +2518,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.5" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2380,6 +2548,7 @@ testutils = ["gitpython (>3)"] name = "pymdown-extensions" version = "10.1" description = "Extension pack for Python Markdown." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2395,6 +2564,7 @@ pyyaml = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2417,6 +2587,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2435,6 +2606,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-describe" version = "2.1.0" description = "Describe-style plugin for pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2449,6 +2621,7 @@ pytest = ">=4.6,<8" name = "pytest-expecter" version = "3.0" description = "Better testing with expecter and pytest." 
+category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2460,6 +2633,7 @@ files = [ name = "pytest-random" version = "0.02" description = "py.test plugin to randomize tests" +category = "dev" optional = false python-versions = "*" files = [ @@ -2473,6 +2647,7 @@ pytest = ">=2.2.3" name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2487,6 +2662,7 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2501,6 +2677,7 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2512,6 +2689,7 @@ files = [ name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -2523,6 +2701,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "dev" optional = false python-versions = "*" files = [ @@ -2546,6 +2725,7 @@ files = [ name = "pywinpty" version = "2.0.11" description = "Pseudo terminal support for Windows from Python." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2560,6 +2740,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2619,6 +2800,7 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. 
" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2633,6 +2815,7 @@ pyyaml = "*" name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2722,6 +2905,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qtconsole" version = "5.4.3" description = "Jupyter Qt console" +category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2748,6 +2932,7 @@ test = ["flaky", "pytest", "pytest-qt"] name = "qtpy" version = "2.3.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2765,6 +2950,7 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] name = "referencing" version = "0.30.0" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2780,6 +2966,7 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2801,6 +2988,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2815,6 +3003,7 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2826,6 +3015,7 @@ files = [ name = "rpds-py" version = "0.9.2" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2932,6 +3122,7 @@ files = [ name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -2948,6 +3139,7 @@ win32 = ["pywin32"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2959,6 +3151,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2970,6 +3163,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" optional = false python-versions = "*" files = [ @@ -2981,6 +3175,7 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2992,6 +3187,7 @@ files = [ name = "sqlalchemy" version = "2.0.19" description = "Database Abstraction Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3039,7 +3235,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} typing-extensions = ">=4.2.0" [package.extras] @@ -3070,6 +3266,7 @@ sqlcipher = ["sqlcipher3-binary"] name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" optional = false python-versions = "*" files = [ @@ -3089,6 +3286,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "terminado" version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3109,6 +3307,7 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3127,6 +3326,7 @@ test = ["flake8", "isort", "pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3138,6 +3338,7 @@ files = [ name = "tomlkit" version = "0.12.1" description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3149,6 +3350,7 @@ files = [ name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -3169,6 +3371,7 @@ files = [ name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3184,6 +3387,7 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-requests" version = "2.31.0.2" description = "Typing stubs for requests" +category = "main" optional = false python-versions = "*" files = [ @@ -3198,6 +3402,7 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" +category = "main" optional = false python-versions = "*" files = [ @@ -3209,6 +3414,7 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3220,6 +3426,7 @@ files = [ name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3234,6 
+3441,7 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3250,6 +3458,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." +category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3266,6 +3475,7 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3305,6 +3515,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -3316,6 +3527,7 @@ files = [ name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3331,6 +3543,7 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" +category = "dev" optional = false python-versions = "*" files = [ @@ -3342,6 +3555,7 @@ files = [ name = "websocket-client" version = "1.6.1" description = "WebSocket client for Python with low level API options" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3358,6 +3572,7 @@ test = ["websockets"] name = "werkzeug" version = "2.3.6" description = "The comprehensive WSGI web application library." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3375,6 +3590,7 @@ watchdog = ["watchdog (>=2.3)"] name = "widgetsnbextension" version = "4.0.8" description = "Jupyter interactive widgets for Jupyter Notebook" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3386,6 +3602,7 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3470,6 +3687,7 @@ files = [ name = "zipp" version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3484,4 +3702,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "fffa54b318eb08630f9bfb6591a6b53322192e784fdddd79bd87025d7186fb0b" +content-hash = "dd13076e47cb3d15d794c986e0cb6d54a72f36accdf6a5441557da63171cfbb1" diff --git a/pyproject.toml b/pyproject.toml index c007db2b..ca68dfba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ psycopg2 = "^2.9.6" python-dotenv = "^1.0.0" flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" +email-validator = "^2.0.0.post2" [tool.poetry.group.dev.dependencies] diff --git a/tests/conftest.py b/tests/conftest.py index bfaf8cd5..88f31b11 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -63,7 +63,10 @@ def _create_user(_test_client): with unittest.mock.patch("pytest_config.TestConfig", TestConfig): response = _test_client.post( "/auth/signup", - json={"email_address": "sample@gmail.com", "password": "test"}, + json={ + "email_address": "sample@gmail.com", + "password": "Testingyeshello11!", + }, ) assert response.status_code == 201 @@ -76,7 +79,10 @@ def _login_user(_test_client): with unittest.mock.patch("pytest_config.TestConfig", TestConfig): 
response = _test_client.post( "/auth/login", - json={"email_address": "sample@gmail.com", "password": "test"}, + json={ + "email_address": "sample@gmail.com", + "password": "Testingyeshello11!", + }, ) assert response.status_code == 200 diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index aeba64d2..346c6ee3 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -17,7 +17,7 @@ def test_post_arm_metadata(_test_client, _login_user): json=[ { "label": "Label1", - "type": "Arm Type", + "type": "Experimental", "description": "Arm Description", "intervention_list": ["intervention1", "intervention2"], } @@ -25,9 +25,10 @@ def test_post_arm_metadata(_test_client, _login_user): ) response_data = json.loads(response.data) + assert response.status_code == 200 assert response_data["arms"][0]["label"] == "Label1" - assert response_data["arms"][0]["type"] == "Arm Type" + assert response_data["arms"][0]["type"] == "Experimental" assert response_data["arms"][0]["description"] == "Arm Description" assert response_data["arms"][0]["intervention_list"] == [ "intervention1", @@ -47,7 +48,7 @@ def test_get_arm_metadata(_test_client, _login_user): response_data = json.loads(response.data) assert response.status_code == 200 assert response_data["arms"][0]["label"] == "Label1" - assert response_data["arms"][0]["type"] == "Arm Type" + assert response_data["arms"][0]["type"] == "Experimental" assert response_data["arms"][0]["description"] == "Arm Description" assert response_data["arms"][0]["intervention_list"] == [ "intervention1", @@ -80,7 +81,7 @@ def test_post_available_ipd_metadata(_test_client, _login_user): json=[ { "identifier": "identifier1", - "type": "type1", + "type": "Clinical Study Report", "url": "google.com", "comment": "comment1", } @@ -92,7 +93,7 @@ def test_post_available_ipd_metadata(_test_client, _login_user): pytest.global_available_ipd_id = 
response_data[0]["id"] assert response_data[0]["identifier"] == "identifier1" - assert response_data[0]["type"] == "type1" + assert response_data[0]["type"] == "Clinical Study Report" assert response_data[0]["url"] == "google.com" assert response_data[0]["comment"] == "comment1" @@ -138,9 +139,9 @@ def test_post_cc_metadata(_test_client, _login_user): "name": "central-contact", "affiliation": "affiliation", "role": "role", - "phone": "phone", - "phone_ext": "phone_ext", - "email_address": "email_address", + "phone": "808", + "phone_ext": "909", + "email_address": "sample@gmail.com", } ], ) @@ -155,9 +156,9 @@ def test_post_cc_metadata(_test_client, _login_user): assert response_data[0]["name"] == "central-contact" assert response_data[0]["affiliation"] == "affiliation" # assert response_data[0]["role"] == "role" - assert response_data[0]["phone"] == "phone" - assert response_data[0]["phone_ext"] == "phone_ext" - assert response_data[0]["email_address"] == "email_address" + assert response_data[0]["phone"] == "808" + assert response_data[0]["phone_ext"] == "909" + assert response_data[0]["email_address"] == "sample@gmail.com" assert response_data[0]["central_contact"] is True @@ -176,9 +177,9 @@ def test_get_cc_metadata(_test_client, _login_user): assert response_data[0]["name"] == "central-contact" assert response_data[0]["affiliation"] == "affiliation" # assert response_data[0]["role"] == "role" - assert response_data[0]["phone"] == "phone" - assert response_data[0]["phone_ext"] == "phone_ext" - assert response_data[0]["email_address"] == "email_address" + assert response_data[0]["phone"] == "808" + assert response_data[0]["phone_ext"] == "909" + assert response_data[0]["email_address"] == "sample@gmail.com" assert response_data[0]["central_contact"] is True @@ -224,14 +225,14 @@ def test_put_collaborators_metadata(_test_client, _login_user): response = _test_client.put( f"/study/{study_id}/metadata/collaborators", json=[ - "collaborator", + "collaborator1123", ], 
) assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data[0] == "collaborator" + assert response_data[0] == "collaborator1123" # ------------------- CONDITIONS METADATA ------------------- # @@ -259,7 +260,7 @@ def test_put_conditions_metadata(_test_client, _login_user): response = _test_client.put( f"/study/{study_id}/metadata/conditions", json=[ - True, + "true", "conditions string", "keywords string", "size string", @@ -336,20 +337,20 @@ def test_put_design_metadata(_test_client, _login_user): f"/study/{study_id}/metadata/design", json={ "design_allocation": "dfasdfasd", - "study_type": "dffad", - "design_intervention_model": "eredf", + "study_type": "Interventional", + "design_intervention_model": "Treatment", "design_intervention_model_description": "dfadf", - "design_primary_purpose": "dfasder", - "design_masking": "dfdasdf", + "design_primary_purpose": "Parallel Assignment", + "design_masking": "Double", "design_masking_description": "tewsfdasf", - "design_who_masked_list": ["one", "two"], - "phase_list": ["three", "four"], + "design_who_masked_list": ["Participant", "Care Provider"], + "phase_list": ["N/A"], "enrollment_count": 3, - "enrollment_type": "dfasdf", + "enrollment_type": "Actual", "number_arms": 2, - "design_observational_model_list": ["yes", "dfasd"], - "design_time_perspective_list": ["uhh"], - "bio_spec_retention": "dfasdf", + "design_observational_model_list": ["Cohort", "Case-Control"], + "design_time_perspective_list": ["Other"], + "bio_spec_retention": "None Retained", "bio_spec_description": "dfasdf", "target_duration": "rewrwe", "number_groups_cohorts": 1, @@ -359,20 +360,23 @@ def test_put_design_metadata(_test_client, _login_user): response_data = json.loads(response.data) assert response_data["design_allocation"] == "dfasdfasd" - assert response_data["study_type"] == "dffad" - assert response_data["design_intervention_model"] == "eredf" + assert response_data["study_type"] == 
"Interventional" + assert response_data["design_intervention_model"] == "Treatment" assert response_data["design_intervention_model_description"] == "dfadf" - assert response_data["design_primary_purpose"] == "dfasder" - assert response_data["design_masking"] == "dfdasdf" + assert response_data["design_primary_purpose"] == "Parallel Assignment" + assert response_data["design_masking"] == "Double" assert response_data["design_masking_description"] == "tewsfdasf" - assert response_data["design_who_masked_list"] == ["one", "two"] - assert response_data["phase_list"] == ["three", "four"] + assert response_data["design_who_masked_list"] == ["Participant", "Care Provider"] + assert response_data["phase_list"] == ["N/A"] assert response_data["enrollment_count"] == 3 - assert response_data["enrollment_type"] == "dfasdf" + assert response_data["enrollment_type"] == "Actual" assert response_data["number_arms"] == 2 - assert response_data["design_observational_model_list"] == ["yes", "dfasd"] - assert response_data["design_time_perspective_list"] == ["uhh"] - assert response_data["bio_spec_retention"] == "dfasdf" + assert response_data["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert response_data["design_time_perspective_list"] == ["Other"] + assert response_data["bio_spec_retention"] == "None Retained" assert response_data["bio_spec_description"] == "dfasdf" assert response_data["target_duration"] == "rewrwe" assert response_data["number_groups_cohorts"] == 1 @@ -400,36 +404,36 @@ def test_put_eligibility_metadata(_test_client, _login_user): response = _test_client.put( f"/study/{study_id}/metadata/eligibility", json={ - "gender": "nb", - "gender_based": "no", + "gender": "All", + "gender_based": "Yes", "gender_description": "none", "minimum_age_value": 18, "maximum_age_value": 61, "minimum_age_unit": "1", "maximum_age_unit": "2", - "healthy_volunteers": "3", - "inclusion_criteria": ["test"], - "exclusion_criteria": ["test", "ttest"], + 
"healthy_volunteers": "Yes", + "inclusion_criteria": ["tests"], + "exclusion_criteria": ["Probability Sample"], "study_population": "study_population", - "sampling_method": "test", + "sampling_method": "Probability Sample", }, ) assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["gender"] == "nb" - assert response_data["gender_based"] == "no" + assert response_data["gender"] == "All" + assert response_data["gender_based"] == "Yes" assert response_data["gender_description"] == "none" assert response_data["minimum_age_value"] == 18 assert response_data["maximum_age_value"] == 61 assert response_data["minimum_age_unit"] == "1" assert response_data["maximum_age_unit"] == "2" - assert response_data["healthy_volunteers"] == "3" - assert response_data["inclusion_criteria"] == ["test"] - assert response_data["exclusion_criteria"] == ["test", "ttest"] + assert response_data["healthy_volunteers"] == "Yes" + assert response_data["inclusion_criteria"] == ["tests"] + assert response_data["exclusion_criteria"] == ["Probability Sample"] assert response_data["study_population"] == "study_population" - assert response_data["sampling_method"] == "test" + assert response_data["sampling_method"] == "Probability Sample" # ------------------- IDENTIFICATION METADATA ------------------- # @@ -440,9 +444,9 @@ def test_get_identification_metadata(_test_client, _login_user): THEN check that the response is valid and retrieves the identification metadata """ # BUG: ENDPOINT NOT WORKING - # study_id = pytest.global_study_id["id"] # type: ignore - # response = _test_client.get(f"/study/{study_id}/metadata/identification") - # assert response.status_code == 200 + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.get(f"/study/{study_id}/metadata/identification") + assert response.status_code == 200 def test_post_identification_metadata(_test_client, _login_user): @@ -452,18 +456,20 @@ def 
test_post_identification_metadata(_test_client, _login_user): THEN check that the response is valid and creates the identification metadata """ # BUG: ENDPOINT NOT WORKING - # study_id = pytest.global_study_id["id"] # type: ignore - # response = _test_client.post( - # f"/study/{study_id}/metadata/identification", - # json={ - # "identifier": "identifier", - # "identifier_type": "identifier type", - # "identifier_value": "identifier value", - # "identifier_link": "identifier link", - # "secondary": "secondary" - # }, - # ) - # assert response.status_code == 200 + study_id = pytest.global_study_id["id"] # type: ignore + response = _test_client.post( + f"/study/{study_id}/metadata/identification", + json={ + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [], + }, + ) + assert response.status_code == 200 # response_data = json.loads(response.data) # pytest.global_identification_id = response_data["id"] @@ -504,11 +510,11 @@ def test_post_intervention_metadata(_test_client, _login_user): f"/study/{study_id}/metadata/intervention", json=[ { - "type": "intervention type", - "name": "intervention name", - "description": "intervention description", - "arm_group_label_list": ["arm group 1", "arm group 2"], - "other_name_list": ["other name 1", "other name 2"], + "type": "Device", + "name": "name test", + "description": "desc", + "arm_group_label_list": ["test", "one"], + "other_name_list": ["uhh", "yes"], } ], ) @@ -516,11 +522,11 @@ def test_post_intervention_metadata(_test_client, _login_user): response_data = json.loads(response.data) pytest.global_intervention_id = response_data[0]["id"] - assert response_data[0]["type"] == "intervention type" - assert response_data[0]["name"] == "intervention name" - assert response_data[0]["description"] == "intervention description" - assert response_data[0]["arm_group_label_list"] == ["arm group 1", "arm group 2"] - assert 
response_data[0]["other_name_list"] == ["other name 1", "other name 2"] + assert response_data[0]["type"] == "Device" + assert response_data[0]["name"] == "name test" + assert response_data[0]["description"] == "desc" + assert response_data[0]["arm_group_label_list"] == ["test", "one"] + assert response_data[0]["other_name_list"] == ["uhh", "yes"] # ------------------- IPD SHARING METADATA ------------------- # @@ -545,24 +551,27 @@ def test_put_ipdsharing_metadata(_test_client, _login_user): response = _test_client.put( f"/study/{study_id}/metadata/ipdsharing", json={ - "ipd_sharing": "ipd sharing", - "ipd_sharing_description": "sharing description", - "ipd_sharing_info_type_list": ["type1", "type2"], - "ipd_sharing_time_frame": "time frame", - "ipd_sharing_access_criteria": "access criteria", - "ipd_sharing_url": "sharing url", + "ipd_sharing": "Yes", + "ipd_sharing_description": "yes", + "ipd_sharing_info_type_list": ["Study Protocol", "Analytical Code"], + "ipd_sharing_time_frame": "uh", + "ipd_sharing_access_criteria": "Study Protocol", + "ipd_sharing_url": "1", }, ) assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["ipd_sharing"] == "ipd sharing" - assert response_data["ipd_sharing_description"] == "sharing description" - assert response_data["ipd_sharing_info_type_list"] == ["type1", "type2"] - assert response_data["ipd_sharing_time_frame"] == "time frame" - assert response_data["ipd_sharing_access_criteria"] == "access criteria" - assert response_data["ipd_sharing_url"] == "sharing url" + assert response_data["ipd_sharing"] == "Yes" + assert response_data["ipd_sharing_description"] == "yes" + assert response_data["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + assert response_data["ipd_sharing_time_frame"] == "uh" + assert response_data["ipd_sharing_access_criteria"] == "Study Protocol" + assert response_data["ipd_sharing_url"] == "1" # ------------------- LINK METADATA 
------------------- # @@ -586,14 +595,14 @@ def test_post_link_metadata(_test_client, _login_user): study_id = pytest.global_study_id["id"] # type: ignore response = _test_client.post( f"/study/{study_id}/metadata/link", - json=[{"url": "url link", "title": "title link"}], + json=[{"url": "google.com", "title": "google link"}], ) assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_link_id = response_data[0]["id"] - assert response_data[0]["url"] == "url link" - assert response_data[0]["title"] == "title link" + assert response_data[0]["url"] == "google.com" + assert response_data[0]["title"] == "google link" def test_delete_link_metadata(_test_client, _login_user): @@ -631,12 +640,12 @@ def test_post_location_metadata(_test_client, _login_user): f"/study/{study_id}/metadata/location", json=[ { - "facility": "facility location", - "status": "status location", - "city": "city location", - "state": "California", - "zip": "zip location", - "country": "country location", + "facility": "test", + "status": "Withdrawn", + "city": "city", + "state": "ca", + "zip": "test", + "country": "yes", } ], ) @@ -644,12 +653,12 @@ def test_post_location_metadata(_test_client, _login_user): response_data = json.loads(response.data) pytest.global_location_id = response_data[0]["id"] - assert response_data[0]["facility"] == "facility location" - assert response_data[0]["status"] == "status location" - assert response_data[0]["city"] == "city location" - assert response_data[0]["state"] == "California" - assert response_data[0]["zip"] == "zip location" - assert response_data[0]["country"] == "country location" + assert response_data[0]["facility"] == "test" + assert response_data[0]["status"] == "Withdrawn" + assert response_data[0]["city"] == "city" + assert response_data[0]["state"] == "ca" + assert response_data[0]["zip"] == "test" + assert response_data[0]["country"] == "yes" def test_delete_location_metadata(_test_client, _login_user): @@ 
-690,18 +699,18 @@ def test_put_other_metadata(_test_client, _login_user): f"/study/{study_id}/metadata/other", json={ "oversight_has_dmc": False, - "conditions": ["TESTCONDITION"], - "keywords": ["TEST"], - "size": "0", + "conditions": ["true", "conditions", "keywords", "1"], + "keywords": ["true", "u"], + "size": 103, }, ) assert response.status_code == 200 response_data = json.loads(response.data) assert response_data["oversight_has_dmc"] is False - assert response_data["conditions"] == ["TESTCONDITION"] - assert response_data["keywords"] == ["TEST"] - assert response_data["size"] == 0 + assert response_data["conditions"] == ["true", "conditions", "keywords", "1"] + assert response_data["keywords"] == ["true", "u"] + assert response_data["size"] == 103 # ------------------- OVERALL-OFFICIAL METADATA ------------------- # @@ -725,21 +734,15 @@ def test_post_overall_official_metadata(_test_client, _login_user): study_id = pytest.global_study_id["id"] # type: ignore response = _test_client.post( f"/study/{study_id}/metadata/overall-official", - json=[ - { - "name": "official name", - "affiliation": "official affiliation", - "role": "official role", - } - ], + json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], ) assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_overall_official_id = response_data[0]["id"] - assert response_data[0]["name"] == "official name" - assert response_data[0]["affiliation"] == "official affiliation" - assert response_data[0]["role"] == "official role" + assert response_data[0]["name"] == "test" + assert response_data[0]["affiliation"] == "aff" + assert response_data[0]["role"] == "Study Chair" def test_delete_overall_official_metadata(_test_client, _login_user): @@ -812,7 +815,7 @@ def test_post_reference_metadata(_test_client, _login_user): json=[ { "identifier": "reference identifier", - "type": "reference type", + "type": "Yes", "citation": "reference citation", } ], @@ -822,7 
+825,7 @@ def test_post_reference_metadata(_test_client, _login_user): pytest.global_reference_id = response_data[0]["id"] assert response_data[0]["identifier"] == "reference identifier" - assert response_data[0]["type"] == "reference type" + assert response_data[0]["type"] == "Yes" assert response_data[0]["citation"] == "reference citation" @@ -864,7 +867,7 @@ def test_put_sponsors_metadata(_test_client, _login_user): response = _test_client.put( f"/study/{study_id}/metadata/sponsors", json={ - "responsible_party_type": "party type", + "responsible_party_type": "Sponsor", "responsible_party_investigator_name": "party name", "responsible_party_investigator_title": "party title", "responsible_party_investigator_affiliation": "party affiliation", @@ -874,7 +877,7 @@ def test_put_sponsors_metadata(_test_client, _login_user): assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["responsible_party_type"] == "party type" + assert response_data["responsible_party_type"] == "Sponsor" assert response_data["responsible_party_investigator_name"] == "party name" assert response_data["responsible_party_investigator_title"] == "party title" assert ( @@ -906,20 +909,20 @@ def test_put_status_metadata(_test_client, _login_user): response = _test_client.put( f"/study/{study_id}/metadata/status", json={ - "overall_status": "in progress", - "why_stopped": "not stopped", - "start_date": "no start", - "start_date_type": "date type", - "completion_date": "no completion", - "completion_date_type": "date type", + "overall_status": "Withdrawn", + "why_stopped": "test", + "start_date": "fff", + "start_date_type": "Actual", + "completion_date": "nuzzzll", + "completion_date_type": "Actual", }, ) assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["overall_status"] == "in progress" - assert response_data["why_stopped"] == "not stopped" - assert response_data["start_date"] == "no start" - assert 
response_data["start_date_type"] == "date type" - assert response_data["completion_date"] == "no completion" - assert response_data["completion_date_type"] == "date type" + assert response_data["overall_status"] == "Withdrawn" + assert response_data["why_stopped"] == "test" + assert response_data["start_date"] == "fff" + assert response_data["start_date_type"] == "Actual" + assert response_data["completion_date"] == "nuzzzll" + assert response_data["completion_date_type"] == "Actual" From 757b4887fb14d59fdbd0112c20777271c73de976 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 19 Oct 2023 10:42:36 -0700 Subject: [PATCH 309/505] fix: remove allowed_origins --- app.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/app.py b/app.py index 0d9e31af..6dec8f87 100644 --- a/app.py +++ b/app.py @@ -61,11 +61,6 @@ def create_app(config_module=None): api.init_app(app) bcrypt.init_app(app) - allowed_origins = [ - "http://localhost:3000", - "https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net", - ] - # Only allow CORS origin for localhost:3000 and any subdomain of azurestaticapps.net/ CORS( app, From 47dade9cdc24b75bcdc9dd6dc7515c4a1dd5df24 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 19 Oct 2023 10:56:12 -0700 Subject: [PATCH 310/505] =?UTF-8?q?=F0=9F=9A=A8=20chore:=20fix=20flake=20e?= =?UTF-8?q?rrors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/app.py b/app.py index 6dec8f87..89078909 100644 --- a/app.py +++ b/app.py @@ -61,14 +61,15 @@ def create_app(config_module=None): api.init_app(app) bcrypt.init_app(app) - # Only allow CORS origin for localhost:3000 and any subdomain of azurestaticapps.net/ + # Only allow CORS origin for localhost:3000 + # and any subdomain of azurestaticapps.net/ CORS( app, resources={ "/*": { "origins": [ "http://localhost:3000", - 
"https://brave-ground-07b6bfb10-datasetmetadata.centralus.2.azurestaticapps.net", + "https://brave-ground-*.centralus.2.azurestaticapps.net", ], } }, From 9e82f3e5fd1e707571e02aabda3636280208e650 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 19 Oct 2023 13:16:42 -0700 Subject: [PATCH 311/505] =?UTF-8?q?=F0=9F=92=9A=20fix:=20update=20cors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 ++ app.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/apis/authentication.py b/apis/authentication.py index 729e9a43..0fbee689 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -227,6 +227,8 @@ def authentication(): In addition, it handles error handling of expired token and non existed users""" g.user = None + print(request.cookies.get("token")) + if "token" not in request.cookies: return token: str = ( diff --git a/app.py b/app.py index 89078909..77cec55d 100644 --- a/app.py +++ b/app.py @@ -69,7 +69,7 @@ def create_app(config_module=None): "/*": { "origins": [ "http://localhost:3000", - "https://brave-ground-*.centralus.2.azurestaticapps.net", + "https://staging.fairhub.io", ], } }, From 39c0b71e3769ecff9570a80cf5508f7314217f29 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 19 Oct 2023 13:16:52 -0700 Subject: [PATCH 312/505] =?UTF-8?q?=F0=9F=92=9A=20fix:=20update=20cors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 4 ++-- app.py | 11 ++++++----- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 0fbee689..89e0d2c9 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -215,7 +215,7 @@ def validate_is_valid_email(instance): resp = make_response(user.to_dict()) resp.set_cookie( - "token", encoded_jwt_code, secure=True, httponly=True, samesite="lax" + "token", encoded_jwt_code, secure=True, 
httponly=True, samesite="None" ) resp.status_code = 200 @@ -359,7 +359,7 @@ def post(self): "", secure=True, httponly=True, - samesite="lax", + samesite="None", expires=datetime.datetime.now(timezone.utc), ) resp.status_code = 204 diff --git a/app.py b/app.py index 77cec55d..c77f6f23 100644 --- a/app.py +++ b/app.py @@ -1,10 +1,10 @@ """Entry point for the application.""" import datetime import importlib +import logging import os from datetime import timezone -import logging import jwt from flask import Flask, request from flask_bcrypt import Bcrypt @@ -69,7 +69,8 @@ def create_app(config_module=None): "/*": { "origins": [ "http://localhost:3000", - "https://staging.fairhub.io", + "https:\/\/brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string + "https://fairhub.io", ], } }, @@ -163,7 +164,7 @@ def on_after_request(resp): "", secure=True, httponly=True, - samesite="lax", + samesite="None", expires=datetime.datetime.now(timezone.utc), ) return resp @@ -179,13 +180,13 @@ def on_after_request(resp): config.FAIRHUB_SECRET, algorithm="HS256", ) - resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="lax") + resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") app.logger.info("after request") app.logger.info(request.headers.get("Origin")) resp.headers["Access-Control-Allow-Origin"] = request.headers.get("Origin") - # resp.headers["Access-Control-Allow-Credentials"] = "true" + resp.headers["Access-Control-Allow-Credentials"] = "true" # resp.headers[ # "Access-Control-Allow-Headers" # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, From ab163ab673db4a52b7ae88c870c09d11a4424c97 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 19 Oct 2023 14:20:21 -0700 Subject: [PATCH 313/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20study=20?= =?UTF-8?q?identification=20schema?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_identification.py | 12 +++++++++--- app.py | 2 +- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 4a44bd1c..4d03c824 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -54,9 +54,15 @@ def post(self, study_id: int): "additionalProperties": False, "properties": { "identifier": {"type": "string", "minLength": 1}, - "identifier_type": {"type": "string", "minLength": 1}, - "identifier_domain": {"type": "string", "minLength": 1}, - "identifier_link": {"type": "string", "minLength": 1}, + "identifier_type": { + "type": "string", + }, + "identifier_domain": { + "type": "string", + }, + "identifier_link": { + "type": "string", + }, }, }, "secondary": { diff --git a/app.py b/app.py index c77f6f23..e3541bd2 100644 --- a/app.py +++ b/app.py @@ -68,7 +68,7 @@ def create_app(config_module=None): resources={ "/*": { "origins": [ - "http://localhost:3000", + "http://localhost:5000", "https:\/\/brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://fairhub.io", ], From fc6e8a6a0d20256a7662e16aca8758e1a2d58f59 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 19 Oct 2023 15:36:38 -0700 Subject: [PATCH 314/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20study=20?= =?UTF-8?q?identification=20schema?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 - apis/study_metadata/study_arm.py | 18 +--- apis/study_metadata/study_available_ipd.py | 4 +- apis/study_metadata/study_contact.py | 5 -- apis/study_metadata/study_description.py | 4 +- apis/study_metadata/study_design.py | 89 +++---------------- apis/study_metadata/study_eligibility.py | 19 ++-- 
apis/study_metadata/study_identification.py | 1 + apis/study_metadata/study_link.py | 2 +- apis/study_metadata/study_location.py | 4 +- apis/study_metadata/study_reference.py | 4 +- .../study_sponsors_collaborators.py | 3 - apis/study_metadata/study_status.py | 9 +- app.py | 2 +- 14 files changed, 45 insertions(+), 121 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 89e0d2c9..a8e968da 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -227,8 +227,6 @@ def authentication(): In addition, it handles error handling of expired token and non existed users""" g.user = None - print(request.cookies.get("token")) - if "token" not in request.cookies: return token: str = ( diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index fb88ae0e..d6b1abfc 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -54,23 +54,13 @@ def post(self, study_id): "type": "object", "additionalProperties": False, "properties": { + "id": {"type": "string"}, "label": {"type": "string", "minLength": 1}, - "type": { - "type": "string", - "enum": [ - "Experimental", - "Active Comparator", - "Placebo Comparator", - "Sham Comparator", - "No Intervention", - "Other", - ], - }, - "description": {"type": "string", "minLength": 1}, + "type": {"type": ["string", "null"]}, + "description": {"type": "string"}, "intervention_list": { "type": "array", - "items": {"type": "string", "minLength": 1}, - "minItems": 1, + "items": {"type": "string"}, "uniqueItems": True, }, }, diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index fea92106..3c57be7c 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -72,10 +72,10 @@ def post(self, study_id: int): "Other", ], }, - "comment": {"type": "string", "minLength": 1}, + "comment": {"type": "string"}, "url": {"type": "string", "format": "uri", "minLength": 1}, }, 
- "required": ["identifier", "type", "url"], + "required": ["identifier", "type", "url", "comment"], }, "uniqueItems": True, } diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 199d1d93..3c1a2466 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -77,14 +77,9 @@ def validate_is_valid_email(instance): "phone": { "type": "string", "minLength": 1, - "maxLength": 30, - "pattern": "^[0-9-]+$", }, "phone_ext": { "type": "string", - "minLength": 1, - "pattern": "^[0-9-]+$", - "errorMessage": "Invalid phone extension", }, "email_address": {"type": "string", "format": "email"}, "central_contact": {"type": "boolean"}, diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index f0ab2f33..bc7624a0 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -43,7 +43,9 @@ def put(self, study_id: int): "additionalProperties": False, "properties": { "brief_summary": {"type": "string", "minLength": 1}, - "detailed_description": {"type": "string", "minLength": 1}, + "detailed_description": { + "type": "string", + }, }, "required": ["brief_summary", "detailed_description"], } diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index fe7b84a0..d4499d86 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -60,71 +60,19 @@ def put(self, study_id: int): "additionalProperties": False, "required": ["study_type"], "properties": { - "design_allocation": {"type": "string", "minLength": 1}, + "design_allocation": {"type": ["string", "null"]}, "study_type": { - "type": "string", - "oneOf": [ - { - "enum": [ - "Interventional", - "Observational", - "Observational Patient Registry", - "Expanded Access", - ] - } - ], - }, - "design_intervention_model": { - "type": "string", - "oneOf": [ - { - "enum": [ - "Treatment", - "Prevention", - "Diagnostic", 
- "Supportive Care", - "Screening", - "Health Services Research", - "Basic Science", - "Device Feasibility", - ] - } - ], + "type": ["string", "null"], }, + "design_intervention_model": {"type": ["string", "null"]}, "design_intervention_model_description": { "type": "string", - "minLength": 1, }, - "design_primary_purpose": { + "design_primary_purpose": {"type": ["string", "null"]}, + "design_masking": {"type": ["string", "null"]}, + "design_masking_description": { "type": "string", - "oneOf": [ - { - "enum": [ - "Single Group Assignment", - "Parallel Assignment", - "Crossover Assignment", - "Factorial Assignment", - "Sequential Assignment", - ] - } - ], }, - "design_masking": { - "type": "string", - "oneOf": [ - { - "enum": [ - "None (open label)", - "Blinded (no details)" "Single", - "Double", - "Triple", - "Quadruple", - "N/A", - ] - } - ], - }, - "design_masking_description": {"type": "string", "minLength": 1}, "design_who_masked_list": { "type": "array", "items": { @@ -140,7 +88,6 @@ def put(self, study_id: int): }, ], }, - "minItems": 1, "uniqueItems": True, }, "phase_list": { @@ -162,7 +109,6 @@ def put(self, study_id: int): } ], }, - "minItems": 1, "uniqueItems": True, }, "enrollment_count": {"type": "integer"}, @@ -170,7 +116,7 @@ def put(self, study_id: int): "type": "string", "enum": ["Actual", "Anticipated"], }, - "number_arms": {"type": "integer"}, + "number_arms": {"type": ["integer", "null"]}, "design_observational_model_list": { "type": "array", "items": { @@ -189,7 +135,6 @@ def put(self, study_id: int): } ], }, - "minItems": 1, "uniqueItems": True, }, "design_time_perspective_list": { @@ -207,24 +152,16 @@ def put(self, study_id: int): } ], }, - "minItems": 1, "uniqueItems": True, }, - "bio_spec_retention": { + "bio_spec_retention": {"type": ["string", "null"]}, + "bio_spec_description": { + "type": "string", + }, + "target_duration": { "type": "string", - "oneOf": [ - { - "enum": [ - "None Retained", - "Samples With DNA", - "Samples Without DNA", 
- ] - } - ], }, - "bio_spec_description": {"type": "string", "minLength": 1}, - "target_duration": {"type": "string", "minLength": 1}, - "number_groups_cohorts": {"type": "integer"}, + "number_groups_cohorts": {"type": ["integer", "null"]}, }, } diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 7cafba9c..f8f3c17e 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -64,14 +64,19 @@ def put(self, study_id: int): "maximum_age_value": {"type": "integer"}, "minimum_age_unit": {"type": "string", "minLength": 1}, "maximum_age_unit": {"type": "string", "minLength": 1}, - "healthy_volunteers": {"type": "string", "enum": ["Yes", "No"]}, - "inclusion_criteria": {"type": "array", "items": {"type": "string"}}, - "exclusion_criteria": {"type": "array", "items": {"type": "string"}}, - "study_population": {"type": "string"}, - "sampling_method": { - "type": "string", - "enum": ["Non-Probability Sample", "Probability Sample"], + "healthy_volunteers": {"type": ["string", "null"]}, + "inclusion_criteria": { + "type": "array", + "items": {"type": "string"}, + "uniqueItems": True, + }, + "exclusion_criteria": { + "type": "array", + "items": {"type": "string"}, + "uniqueItems": True, }, + "study_population": {"type": "string"}, + "sampling_method": {"type": ["string", "null"]}, }, } diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 4d03c824..194aef99 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -56,6 +56,7 @@ def post(self, study_id: int): "identifier": {"type": "string", "minLength": 1}, "identifier_type": { "type": "string", + "minLength": 1, }, "identifier_domain": { "type": "string", diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 216632ad..76f5190f 100644 --- a/apis/study_metadata/study_link.py +++ 
b/apis/study_metadata/study_link.py @@ -46,7 +46,7 @@ def post(self, study_id: int): "additionalProperties": False, "properties": { "url": {"type": "string", "format": "uri"}, - "title": {"type": "string", "minLength": 1}, + "title": {"type": "string"}, }, "required": ["url", "title"], }, diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 771bf381..b495864f 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -67,8 +67,8 @@ def post(self, study_id: int): ], }, "city": {"type": "string", "minLength": 1}, - "state": {"type": "string", "minLength": 1}, - "zip": {"type": "string", "minLength": 1}, + "state": {"type": "string"}, + "zip": {"type": "string"}, "country": {"type": "string", "minLength": 1}, }, "required": ["facility", "status", "city", "country"], diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index e05f0364..871909d2 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -50,8 +50,8 @@ def post(self, study_id: int): "type": "object", "additionalProperties": False, "properties": { - "identifier": {"type": "string", "minLength": 1}, - "type": {"type": "string", "enum": ["Yes", "No"]}, + "identifier": {"type": "string"}, + "type": {"type": ["string", "null"]}, "citation": {"type": "string", "minLength": 1}, }, "required": ["citation", "identifier", "type"], diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index b6f59b9d..73009e83 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -72,15 +72,12 @@ def put(self, study_id: int): }, "responsible_party_investigator_name": { "type": "string", - "minLength": 1, }, "responsible_party_investigator_title": { "type": "string", - "minLength": 1, }, "responsible_party_investigator_affiliation": { 
"type": "string", - "minLength": 1, }, "lead_sponsor_name": {"type": "string", "minLength": 1}, }, diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 3ae9439b..0840cf64 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -74,14 +74,13 @@ def put(self, study_id: int): "start_date": {"type": "string", "minLength": 1}, "start_date_type": { "type": "string", - "minLength": 1, "enum": ["Actual", "Anticipated"], }, - "completion_date": {"type": "string", "minLength": 1}, + "completion_date": { + "type": ["string", "null"], + }, "completion_date_type": { - "type": "string", - "minLength": 1, - "enum": ["Actual", "Anticipated"], + "type": ["string", "null"], }, }, } diff --git a/app.py b/app.py index e3541bd2..c77f6f23 100644 --- a/app.py +++ b/app.py @@ -68,7 +68,7 @@ def create_app(config_module=None): resources={ "/*": { "origins": [ - "http://localhost:5000", + "http://localhost:3000", "https:\/\/brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://fairhub.io", ], From ed9dd7516d07600a18ea552bb88d89a09f89047b Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Tue, 24 Oct 2023 16:02:58 -0700 Subject: [PATCH 315/505] feat: update dataset metadata endpoints (#16) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: reconfigure dataset metadata * chore: remove ids from 1-to-1 study metadata * style: format * chore: dataset models and endpoints refactored * chore: study models refactored * chore: dataset metadata functions were changed * fix: contributor and creator endpoints * chore: refactor dataset metadata endpoints * style: flake8 errors * style: poe format * style: pylint format check * style: 🎨 fix code style issues with Black * fix: study identification checked * wip: related item endpoint * 
wip: dataset related item endpoint * wip: pylint errors * 🔐 fix: update secret key length * fix: dataset medata relate item title * wip: related item in dataset metadata * style: 🎨 fix code style issues with Black * fix: dataset metadata related item * fix: study identifiers model * style: format errors check * style: pylint format * style: 🎨 fix code style issues with Black * style: permissions added to dataset metadata * style: formatting * style: 🎨 fix code style issues with Black * fix: dataset other * feat: dataset version GET * fix: study identifiers * fix: dataset identifier * fix: dataset related item other field * style: format * feat: add version readme to the model * feat: dataset title permission * feat: dataset description permission * style:format * style: 🎨 fix code style issues with Black * fix: minor fixes in dataset metadata * style: 🎨 fix code style issues with Black * feat: updates to dataset metadata * fix: dataset metadata and docker file * style: 🎨 fix code style issues with Black * fix: dataset metadata * fix: minor fix for contributor metadata --------- Co-authored-by: Lint Action Co-authored-by: Sanjay Soundarajan --- .../32e5ff331a78_add_token_blacklist.py | 26 -- .../639a13561089_delete_token_blacklist.py | 26 -- ...d33834_remove_column_in_token_blacklist.py | 26 -- .../versions/e6cc254fc968_token_blacklist.py | 26 -- apis/__init__.py | 16 +- apis/authentication.py | 3 + apis/contributor.py | 13 + apis/dataset.py | 56 +++- apis/dataset_metadata/__init__.py | 0 apis/dataset_metadata/dataset_access.py | 20 +- .../dataset_alternate_identifier.py | 32 ++- apis/dataset_metadata/dataset_consent.py | 21 +- apis/dataset_metadata/dataset_contributor.py | 134 +++++++++ apis/dataset_metadata/dataset_date.py | 56 +++- .../dataset_de_ident_level.py | 23 +- apis/dataset_metadata/dataset_description.py | 48 +++- apis/dataset_metadata/dataset_funder.py | 46 +++- .../dataset_managing_organization.py | 35 --- apis/dataset_metadata/dataset_other.py | 43 
++- apis/dataset_metadata/dataset_readme.py | 10 +- apis/dataset_metadata/dataset_record_keys.py | 28 +- apis/dataset_metadata/dataset_related_item.py | 258 ++++++++++++++++-- .../dataset_related_item_contributor.py | 26 -- .../dataset_related_item_identifier.py | 26 -- .../dataset_related_item_other.py | 27 -- .../dataset_related_item_title.py | 27 -- apis/dataset_metadata/dataset_rights.py | 55 +++- apis/dataset_metadata/dataset_subject.py | 55 +++- apis/dataset_metadata/dataset_title.py | 42 ++- apis/study.py | 25 +- apis/study_metadata/study_identification.py | 24 +- app.py | 7 +- model/__init__.py | 17 +- model/dataset.py | 49 +++- model/dataset_contributor.py | 53 ---- model/dataset_metadata/dataset_access.py | 20 +- .../dataset_alternate_identifier.py | 12 +- model/dataset_metadata/dataset_consent.py | 19 +- model/dataset_metadata/dataset_contributor.py | 58 ++++ .../dataset_contributor_affiliation.py | 15 +- model/dataset_metadata/dataset_date.py | 21 +- .../dataset_de_ident_level.py | 19 +- model/dataset_metadata/dataset_description.py | 14 +- model/dataset_metadata/dataset_funder.py | 8 +- .../dataset_managing_organization.py | 35 --- model/dataset_metadata/dataset_other.py | 46 +++- model/dataset_metadata/dataset_readme.py | 11 +- model/dataset_metadata/dataset_record_keys.py | 18 +- .../dataset_metadata/dataset_related_item.py | 76 +++++- .../dataset_related_item_contributor.py | 32 ++- .../dataset_related_item_identifier.py | 37 ++- .../dataset_related_item_other.py | 54 ++-- .../dataset_related_item_title.py | 15 +- model/dataset_metadata/dataset_rights.py | 6 + model/dataset_metadata/dataset_subject.py | 6 + model/dataset_metadata/dataset_title.py | 10 +- model/dataset_versions.py | 35 --- model/invited_study_contributor.py | 1 + model/notification.py | 47 ++++ model/participant.py | 5 +- model/study.py | 3 +- model/study_metadata/identifiers.py | 5 +- model/study_metadata/study_available_ipd.py | 2 +- model/study_metadata/study_description.py | 10 
+- model/study_metadata/study_design.py | 50 ++-- model/study_metadata/study_eligibility.py | 40 ++- model/study_metadata/study_identification.py | 5 +- model/study_metadata/study_intervention.py | 2 +- model/study_metadata/study_ipdsharing.py | 14 +- model/study_metadata/study_location.py | 2 +- model/study_metadata/study_other.py | 10 +- .../study_metadata/study_overall_official.py | 2 +- model/study_metadata/study_reference.py | 2 +- .../study_sponsors_collaborators.py | 14 +- model/study_metadata/study_status.py | 10 +- model/token_blacklist.py | 3 + model/user.py | 2 + model/version.py | 28 +- model/version_readme.py | 25 ++ tests/functional/test_study_metadata_api.py | 2 +- 80 files changed, 1435 insertions(+), 795 deletions(-) delete mode 100644 alembic/versions/32e5ff331a78_add_token_blacklist.py delete mode 100644 alembic/versions/639a13561089_delete_token_blacklist.py delete mode 100644 alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py delete mode 100644 alembic/versions/e6cc254fc968_token_blacklist.py create mode 100644 apis/dataset_metadata/__init__.py create mode 100644 apis/dataset_metadata/dataset_contributor.py delete mode 100644 apis/dataset_metadata/dataset_managing_organization.py delete mode 100644 apis/dataset_metadata/dataset_related_item_contributor.py delete mode 100644 apis/dataset_metadata/dataset_related_item_identifier.py delete mode 100644 apis/dataset_metadata/dataset_related_item_other.py delete mode 100644 apis/dataset_metadata/dataset_related_item_title.py delete mode 100644 model/dataset_contributor.py create mode 100644 model/dataset_metadata/dataset_contributor.py delete mode 100644 model/dataset_metadata/dataset_managing_organization.py delete mode 100644 model/dataset_versions.py create mode 100644 model/notification.py create mode 100644 model/version_readme.py diff --git a/alembic/versions/32e5ff331a78_add_token_blacklist.py b/alembic/versions/32e5ff331a78_add_token_blacklist.py deleted file mode 100644 index 
0ff84efd..00000000 --- a/alembic/versions/32e5ff331a78_add_token_blacklist.py +++ /dev/null @@ -1,26 +0,0 @@ -"""add token_blacklist - -Revision ID: 32e5ff331a78 -Revises: 639a13561089 -Create Date: 2023-10-09 11:10:06.568148 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = "32e5ff331a78" -down_revision: Union[str, None] = "639a13561089" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade(): - op.add_column("token_blacklist", sa.Column("user_id", sa.String, nullable=True)) - - -def downgrade() -> None: - pass diff --git a/alembic/versions/639a13561089_delete_token_blacklist.py b/alembic/versions/639a13561089_delete_token_blacklist.py deleted file mode 100644 index e964c05b..00000000 --- a/alembic/versions/639a13561089_delete_token_blacklist.py +++ /dev/null @@ -1,26 +0,0 @@ -"""delete token_blacklist - -Revision ID: 639a13561089 -Revises: e6cc254fc968 -Create Date: 2023-10-08 23:14:48.882104 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = "639a13561089" -down_revision: Union[str, None] = "e6cc254fc968" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - op.drop_column("token_blacklist", "user_id") - - -def downgrade() -> None: - pass diff --git a/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py b/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py deleted file mode 100644 index f783a7b9..00000000 --- a/alembic/versions/6d4271d33834_remove_column_in_token_blacklist.py +++ /dev/null @@ -1,26 +0,0 @@ -"""remove column in token_blacklist - -Revision ID: 6d4271d33834 -Revises: 32e5ff331a78 -Create Date: 2023-10-09 11:11:58.478289 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = "6d4271d33834" -down_revision: Union[str, None] = "32e5ff331a78" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - op.drop_column("token_blacklist", "user_id") - - -def downgrade() -> None: - pass diff --git a/alembic/versions/e6cc254fc968_token_blacklist.py b/alembic/versions/e6cc254fc968_token_blacklist.py deleted file mode 100644 index 4fa8dcd1..00000000 --- a/alembic/versions/e6cc254fc968_token_blacklist.py +++ /dev/null @@ -1,26 +0,0 @@ -"""token_blacklist - -Revision ID: e6cc254fc968 -Revises: 3e48c46694c8 -Create Date: 2023-10-06 19:40:38.517323 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = "e6cc254fc968" -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade(): - op.add_column("token_blacklist", sa.Column("user_id", sa.String, nullable=True)) - - -def downgrade() -> None: - pass diff --git a/apis/__init__.py b/apis/__init__.py index 7bebacc9..26c8f39e 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -10,23 +10,15 @@ from .dataset_metadata.dataset_access import api as access from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier from .dataset_metadata.dataset_consent import api as consent +from .dataset_metadata.dataset_contributor import api as dataset_contributor from .dataset_metadata.dataset_date import api as date from .dataset_metadata.dataset_de_ident_level import api as de_ident_level from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_funder import api as funder -from .dataset_metadata.dataset_managing_organization import api as managing_organization from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_readme import api as readme from .dataset_metadata.dataset_record_keys import api as record_keys from .dataset_metadata.dataset_related_item import api as related_item -from .dataset_metadata.dataset_related_item_contributor import ( - api as related_item_contributor, -) -from .dataset_metadata.dataset_related_item_identifier import ( - api as related_item_identifier, -) -from .dataset_metadata.dataset_related_item_other import api as related_item_other -from .dataset_metadata.dataset_related_item_title import api as related_item_title from .dataset_metadata.dataset_rights import api as rights from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_title import api as title @@ -69,16 +61,11 @@ "de_ident_level", "description", "funder", - 
"managing_organization", "dataset_other", "readme", "record_keys", "related_item", - "related_item_contributor", - "related_item_identifier", "api", - "related_item_other", - "related_item_title", "rights", "subject", "title", @@ -101,6 +88,7 @@ "user", "identification", "study_description", + "dataset_contributor", ] diff --git a/apis/authentication.py b/apis/authentication.py index a8e968da..ebf36b8d 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -297,6 +297,7 @@ def is_granted(permission: str, study=None): "add_dataset", "update_dataset", "delete_dataset", + "version", "publish_version", "participant", "study_metadata", @@ -314,6 +315,7 @@ def is_granted(permission: str, study=None): "add_dataset", "update_dataset", "delete_dataset", + "version", "publish_version", "participant", "study_metadata", @@ -329,6 +331,7 @@ def is_granted(permission: str, study=None): "delete_dataset", "participant", "study_metadata", + "version", "dataset_metadata", ], "viewer": ["viewer", "view"], diff --git a/apis/contributor.py b/apis/contributor.py index 9338a50b..72621e80 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -44,15 +44,28 @@ def post(self, study_id: int): if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify", 403 data: Union[dict, Any] = request.json + email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] + # encoded_jwt_code = jwt.encode( + # { + # "user": user.id, + # "exp": datetime.datetime.now(timezone.utc) + # + datetime.timedelta(minutes=180), # noqa: W503 + # "jti": str(uuid.uuid4()), + # }, # noqa: W503 + # config.FAIRHUB_SECRET, + # algorithm="HS256", + # ) contributor_ = None + try: if user: contributor_ = study_obj.add_user_to_study(user, permission) else: contributor_ = study_obj.invite_user_to_study(email_address, permission) + except model.StudyException as ex: return ex.args[0], 409 
model.db.session.commit() diff --git a/apis/dataset.py b/apis/dataset.py index ec2506d2..b334cc29 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -55,9 +55,20 @@ def post(self, study_id): if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 # todo if study.participant id== different study Throw error - # data: typing.Union[typing.Any, dict] = request.json + data: typing.Union[typing.Any, dict] = request.json dataset_ = model.Dataset.from_data(study) model.db.session.add(dataset_) + + title_element = model.DatasetTitle.query.filter_by( + dataset_id=dataset_.id + ).first() + title_element.title = data["title"] + + description_element = model.DatasetDescription.query.filter_by( + dataset_id=dataset_.id + ).first() + description_element.description = data["description"] + model.db.session.commit() return dataset_.to_dict() @@ -114,7 +125,6 @@ class VersionResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.doc("dataset version") - @api.marshal_with(dataset_versions_model) def get( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument @@ -148,19 +158,47 @@ def delete( @api.route("/study//dataset//version") -@api.response(201, "Success") -@api.response(400, "Validation Error") class VersionList(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("versions") + def get(self, study_id: int, dataset_id: int): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + dataset_obj = model.Dataset.query.get(dataset_id) + return [i.to_dict() for i in dataset_obj.dataset_versions.all()], 200 + + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version add") def post(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) - if not is_granted("publish_version", study): + if not 
is_granted("version", study): return "Access denied, you can not modify", 403 + data: typing.Union[typing.Any, dict] = request.json - data["participants"] = [ - model.Participant.query.get(i) for i in data["participants"] - ] + # data["participants"] = [ + # model.Participant.query.get(i) for i in data["participants"] + # ] data_obj = model.Dataset.query.get(dataset_id) dataset_versions = model.Version.from_data(data_obj, data) model.db.session.add(dataset_versions) model.db.session.commit() - return jsonify(dataset_versions.to_dict()) + return dataset_versions.to_dict() + + +@api.route("/study//dataset//version//publish") +class PublishResource(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version publish") + def post(self, study_id: int, dataset_id: int): + study = model.Study.query.get(study_id) + if not is_granted("publish_version", study): + return "Access denied, you can not modify", 403 + data_obj = model.Dataset.query.get(dataset_id) + data: typing.Union[typing.Any, dict] = request.json + dataset_versions = model.Version.from_data(data_obj, data) + model.db.session.commit() + return dataset_versions.to_dict() diff --git a/apis/dataset_metadata/__init__.py b/apis/dataset_metadata/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index 3b9c7da2..0a5f3046 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -2,6 +2,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_access = api.model( @@ -21,14 +22,21 @@ class DatasetAccessResource(Resource): @api.doc("access") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_access) - def get(self, study_id: int, dataset_id: int): + # @api.marshal_with(dataset_access) + def 
get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_access_ = dataset_.dataset_access - return [d.to_dict() for d in dataset_access_] + return dataset_access_.to_dict() + + @api.doc("update access") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 - def put(self, study_id: int, dataset_id: int): dataset_ = model.Dataset.query.get(dataset_id) - dataset_access_ = dataset_.dataset_access.update(request.json) + dataset_.dataset_access.update(request.json) model.db.session.commit() - return dataset_access_.to_dict() + return dataset_.dataset_access.to_dict() diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index f95804c3..33ec00a2 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -4,6 +4,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_identifier = api.model( @@ -11,24 +12,31 @@ { "id": fields.String(required=True), "identifier": fields.String(required=True), - "identifier_type": fields.String(required=True), + "type": fields.String(required=False), "alternate": fields.Boolean(required=True), }, ) -@api.route("/study//dataset//metadata/identifier") +@api.route("/study//dataset//metadata/alternative-identifier") class DatasetAlternateIdentifierResource(Resource): @api.doc("identifier") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_identifier) - def get(self, study_id: int, dataset_id: int): + # 
@api.marshal_with(dataset_identifier) + def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_identifier_ = dataset_.dataset_alternate_identifier return [d.to_dict() for d in dataset_identifier_] + @api.doc("update identifier") + @api.response(200, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + # pylint: disable= unused-argument data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -51,13 +59,21 @@ def post(self, study_id: int, dataset_id: int): return list_of_elements @api.route( - "/study//dataset//metadata/identifier/" + "/study//dataset//" + "metadata/alternative-identifier/" ) class DatasetAlternateIdentifierUpdate(Resource): - def put(self, study_id: int, dataset_id: int, identifier_id: int): + @api.doc("delete identifier") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, study_id: int, dataset_id: int, identifier_id: int + ): # pylint: disable= unused-argument dataset_identifier_ = model.DatasetAlternateIdentifier.query.get( identifier_id ) - dataset_identifier_.update(request.json) + + model.db.session.delete(dataset_identifier_) model.db.session.commit() - return dataset_identifier_.to_dict() + + return 204 diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index c6617c06..7eded36e 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,9 +1,8 @@ -import typing - from flask import request from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_consent = 
api.model( @@ -26,15 +25,21 @@ class DatasetConsentResource(Resource): @api.doc("consent") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_consent) - def get(self, study_id: int, dataset_id: int): + # @api.marshal_with(dataset_consent) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_consent_ = dataset_.dataset_consent - return [d.to_dict() for d in dataset_consent_] + return dataset_consent_.to_dict() + @api.doc("update consent") + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): - data: typing.Union[dict, typing.Any] = request.json + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent.update(data) + dataset_.dataset_consent.update(data) model.db.session.commit() - return dataset_consent_.to_dict() + return dataset_.dataset_consent.to_dict() diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py new file mode 100644 index 00000000..dd514624 --- /dev/null +++ b/apis/dataset_metadata/dataset_contributor.py @@ -0,0 +1,134 @@ +from typing import Any, Union + +from flask import request +from flask_restx import Resource + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_contributor = api.model( + "DatasetContributor", + {}, +) + + +@api.route("/study//dataset//metadata/contributor") +class DatasetContributorResource(Resource): + @api.doc("contributor") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(dataset_contributor) + def get(self, study_id: int, dataset_id: int): # 
pylint: disable= unused-argument + dataset_ = model.Dataset.query.get(dataset_id) + dataset_contributor_ = dataset_.dataset_contributors + return [d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"]] + + @api.doc("update contributor") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, can't modify dataset metadata", 403 + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + list_of_elements = [] + for i in data: + i["creator"] = False + if "id" in i and i["id"]: + dataset_contributor_ = model.DatasetContributor.query.get(i["id"]) + if not dataset_contributor_: + return f"Study link {i['id']} Id is not found", 404 + dataset_contributor_.update(i) + list_of_elements.append(dataset_contributor_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_contributor_ = model.DatasetContributor.from_data(data_obj, i) + model.db.session.add(dataset_contributor_) + list_of_elements.append(dataset_contributor_.to_dict()) + model.db.session.commit() + return list_of_elements + + +@api.route( + "/study//dataset//metadata/contributor/" +) +class DatasetContributorDelete(Resource): + @api.doc("delete contributor") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + contributor_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + contributor_ = model.DatasetContributor.query.get(contributor_id) + + model.db.session.delete(contributor_) + model.db.session.commit() + + return 204 + + +@api.route("/study//dataset//metadata/creator") +class DatasetCreatorResource(Resource): + 
@api.doc("creator") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(dataset_contributor) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + dataset_ = model.Dataset.query.get(dataset_id) + dataset_creator_ = dataset_.dataset_contributors + return [d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"]] + + @api.doc("update creator") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + list_of_elements = [] + for i in data: + i["creator"] = True + if "id" in i and i["id"]: + i["contributor_type"] = None + dataset_creator_ = model.DatasetContributor.query.get(i["id"]) + if not dataset_creator_: + return f"Study link {i['id']} Id is not found", 404 + dataset_creator_.update(i) + list_of_elements.append(dataset_creator_.to_dict()) + elif "id" not in i or not i["id"]: + i["contributor_type"] = None + dataset_creator_ = model.DatasetContributor.from_data(data_obj, i) + model.db.session.add(dataset_creator_) + list_of_elements.append(dataset_creator_.to_dict()) + model.db.session.commit() + return list_of_elements + + +@api.route("/study//dataset//metadata/creator/") +class DatasetCreatorDelete(Resource): + @api.doc("delete creator") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + creator_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_creator_ = 
model.DatasetContributor.query.get(creator_id) + model.db.session.delete(dataset_creator_) + model.db.session.commit() + + return 204 diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 73ddff1c..da2f9f3a 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,9 +1,10 @@ -import typing +from typing import Any, Union from flask import request from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_date = api.model( @@ -11,8 +12,8 @@ { "id": fields.String(required=True), "date": fields.String(required=True), - "date_type": fields.String(required=True), - "data_information": fields.String(required=True), + "type": fields.String(required=True), + "information": fields.String(required=True), }, ) @@ -22,15 +23,50 @@ class DatasetDateResource(Resource): @api.doc("date") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_date) - def get(self, study_id: int, dataset_id: int): + # @api.marshal_with(dataset_date) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_date_ = dataset_.dataset_date return [d.to_dict() for d in dataset_date_] - def put(self, study_id: int, dataset_id: int): - data: typing.Union[dict, typing.Any] = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_date_ = dataset_.dataset_date.update(data) + @api.doc("update date") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + 
list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_date_ = model.DatasetDate.query.get(i["id"]) + if not dataset_date_: + return f"Study link {i['id']} Id is not found", 404 + dataset_date_.update(i) + list_of_elements.append(dataset_date_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_date_ = model.DatasetDate.from_data(data_obj, i) + model.db.session.add(dataset_date_) + list_of_elements.append(dataset_date_.to_dict()) + model.db.session.commit() + return list_of_elements + + +@api.route("/study//dataset//metadata/date/") +class DatasetDateDeleteResource(Resource): + @api.doc("delete date") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, study_id: int, dataset_id: int, date_id: int + ): # pylint: disable= unused-argument + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + date_ = model.DatasetDate.query.get(date_id) + + model.db.session.delete(date_) model.db.session.commit() - return dataset_date_.to_dict() + return 204 diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index 082d73aa..e1974e7f 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -1,9 +1,8 @@ -import typing - from flask import request from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api de_ident_level = api.model( @@ -21,20 +20,26 @@ ) -@api.route("/study//dataset//metadata/de_ident_level") +@api.route("/study//dataset//metadata/de-identification-level") class DatasetDeIdentLevelResource(Resource): @api.doc("de_ident_level") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(de_ident_level) - def get(self, study_id: int, dataset_id: 
int): + # @api.marshal_with(de_ident_level) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) de_ident_level_ = dataset_.dataset_de_ident_level - return [d.to_dict() for d in de_ident_level_] + return de_ident_level_.to_dict() + @api.doc("update ident level") + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): - data: typing.Union[dict, typing.Any] = request.json + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) - de_ident_level_ = dataset_.dataset_de_ident_level.update(data) + dataset_.dataset_de_ident_level.update(data) model.db.session.commit() - return de_ident_level_.to_dict() + return dataset_.dataset_de_ident_level.to_dict() diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 511a52a6..fa2f3df8 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -4,6 +4,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_description = api.model( @@ -21,22 +22,38 @@ class DatasetDescriptionResource(Resource): @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_description) - def get(self, study_id: int, dataset_id: int): + # @api.marshal_with(dataset_description) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_description_ = dataset_.dataset_description return [d.to_dict() for d in dataset_description_] + @api.doc("update description") + @api.response(200, 
"Success") + @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: if "id" in i and i["id"]: dataset_description_ = model.DatasetDescription.query.get(i["id"]) + # if dataset_description_.type == "Abstract": + # return ( + # "Abstract type can not be modified", + # 403, + # ) dataset_description_.update(i) list_of_elements.append(dataset_description_.to_dict()) elif "id" not in i or not i["id"]: + if i["type"] == "Abstract": + return ( + "Abstract type in description can not be given", + 403, + ) dataset_description_ = model.DatasetDescription.from_data(data_obj, i) model.db.session.add(dataset_description_) list_of_elements.append(dataset_description_.to_dict()) @@ -44,11 +61,32 @@ def post(self, study_id: int, dataset_id: int): return list_of_elements @api.route( - "/study//dataset//metadata/description/" + "/study//dataset//" + "metadata/description/" ) class DatasetDescriptionUpdate(Resource): - def delete(self, study_id: int, dataset_id: int, description_id: int): + @api.doc("delete description") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + description_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not make any change in dataset metadata", + 403, + ) dataset_description_ = model.DatasetDescription.query.get(description_id) + if dataset_description_.type == "Abstract": + return ( + "Abstract description can not be deleted", + 403, + ) model.db.session.delete(dataset_description_) model.db.session.commit() - return 
dataset_description_.to_dict() + + return 204 diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 4b2470f3..8b610a9e 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -4,6 +4,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_funder = api.model( @@ -27,24 +28,53 @@ class DatasetFunderResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset_funder) - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_funder_ = dataset_.dataset_funder return [d.to_dict() for d in dataset_funder_] - def post(self, study_id: int, dataset_id: int): + @api.doc("update funder") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument data: Union[Any, dict] = request.json + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 data_obj = model.Dataset.query.get(dataset_id) - dataset_funder_ = model.DatasetFunder.from_data(data_obj, data) - model.db.session.add(dataset_funder_) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_funder_ = model.DatasetFunder.query.get(i["id"]) + if not dataset_funder_: + return f"Study link {i['id']} Id is not found", 404 + dataset_funder_.update(i) + list_of_elements.append(dataset_funder_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_funder_ = model.DatasetFunder.from_data(data_obj, i) + model.db.session.add(dataset_funder_) + list_of_elements.append(dataset_funder_.to_dict()) model.db.session.commit() - 
return dataset_funder_.to_dict() + return list_of_elements @api.route("/study//dataset//metadata/funder/") class DatasetFunderUpdate(Resource): - def put(self, study_id: int, dataset_id: int, funder_id: int): + @api.doc("delete funder") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + funder_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 dataset_funder_ = model.DatasetFunder.query.get(funder_id) - dataset_funder_.update(request.json) + + model.db.session.delete(dataset_funder_) model.db.session.commit() - return dataset_funder_.to_dict() + + return 204 diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py deleted file mode 100644 index 475dbf38..00000000 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ /dev/null @@ -1,35 +0,0 @@ -import typing - -from flask import request -from flask_restx import Resource, fields - -import model -from apis.dataset_metadata_namespace import api - -managing_organization = api.model( - "DatasetManagingOrganization", - { - "id": fields.String(required=True), - "name": fields.String(required=True), - "ror_id": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/managing_organization") -class DatasetManagingOrganizationResource(Resource): - @api.doc("managing_organization") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(managing_organization) - def get(self, study_id: int, dataset_id: int): - dataset_ = model.Dataset.query.get(dataset_id) - managing_organization_ = dataset_.dataset_managing_organization - return [d.to_dict() for d in managing_organization_] - - def put(self, study_id: int, dataset_id: int): - data: 
typing.Union[dict, typing.Any] = request.json - dataset_ = model.Dataset.query.get(dataset_id) - managing_organization_ = dataset_.dataset_managing_organization.update(data) - model.db.session.commit() - return managing_organization_.to_dict() diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 7d9941bd..e78e259f 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,9 +1,8 @@ -import typing - from flask import request from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_other = api.model( @@ -26,14 +25,46 @@ class DatasetOtherResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset_other) - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other - return [d.to_dict() for d in dataset_other_] + return dataset_other_.to_dict() + @api.doc("other update") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_other) def put(self, study_id: int, dataset_id: int): - data: typing.Union[dict, typing.Any] = request.json + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) - dataset_other_ = dataset_.dataset_other.update(data) + dataset_.dataset_other.update(data) model.db.session.commit() + return dataset_.dataset_other.to_dict() + + +@api.route("/study//dataset//metadata/publisher") +class DatasetPublisherResource(Resource): + @api.doc("publisher") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # 
@api.marshal_with(dataset_publisher) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + dataset_ = model.Dataset.query.get(dataset_id) + dataset_other_ = dataset_.dataset_other return dataset_other_.to_dict() + + @api.doc("update publisher") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_other.update(data) + model.db.session.commit() + return dataset_.dataset_other.to_dict() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index 75e82560..ec8101f6 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -1,9 +1,8 @@ -import typing - from flask import request from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_readme = api.model( @@ -18,13 +17,16 @@ class DatasetReadmeResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset_readme) - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_readme_ = dataset_.dataset_readme return [d.to_dict() for d in dataset_readme_] def put(self, study_id: int, dataset_id: int): - data: typing.Union[dict, typing.Any] = request.json + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_readme_ 
= dataset_.dataset_readme.update(data) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 1ed41648..fe0613dc 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -1,35 +1,41 @@ -import typing - from flask import request from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_record_keys = api.model( "DatasetRecordKeys", { "id": fields.String(required=True), - "key_type": fields.String(required=True), + "key_type": fields.String(required=False), "key_details": fields.String(required=True), }, ) -@api.route("/study//dataset//metadata/record_keys") +@api.route("/study//dataset//metadata/record-keys") class DatasetRecordKeysResource(Resource): - @api.doc("record_keys") + @api.doc("record keys") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_record_keys) - def get(self, study_id: int, dataset_id: int): + # @api.marshal_with(dataset_record_keys) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) + dataset_record_keys_ = dataset_.dataset_record_keys - return [d.to_dict() for d in dataset_record_keys_] + return dataset_record_keys_.to_dict() + @api.doc("update record keys") + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): - data: typing.Union[dict, typing.Any] = request.json + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) - dataset_record_keys_ = dataset_.dataset_de_ident_level.update(data) + dataset_.dataset_record_keys.update(data) 
model.db.session.commit() - return dataset_record_keys_.to_dict() + return dataset_.dataset_record_keys.to_dict(), 201 diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 647483d1..35d938b8 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -4,6 +4,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_related_item = api.model( @@ -16,32 +17,253 @@ ) -@api.route("/study//dataset//metadata/related_item") +@api.route("/study//dataset//metadata/related-item") class DatasetRelatedItemResource(Resource): - @api.doc("related_item") + @api.doc("related item") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_related_item) - def get(self, study_id: int, dataset_id: int): + # @api.marshal_with(dataset_related_item) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_ = dataset_.dataset_related_item return [d.to_dict() for d in dataset_related_item_] + @api.doc("update related item") + @api.response(200, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not" + " make any change in dataset metadata" # noqa: E402 + ), 403 data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) - dataset_related_item_ = model.DatasetRelatedItem.from_data(data_obj, data) - model.db.session.add(dataset_related_item_) + for i in data: + if "id" in i and i["id"]: + dataset_related_item_ = model.DatasetRelatedItem.query.get(i["id"]) + if not dataset_related_item_: + return f"{i['id']} Id is not found", 404 + 
dataset_related_item_.update(i) + # dataset_related_item_.dataset_related_item_other.update(i) + + for title in i["titles"]: + if "id" in title and title["id"]: + update_title = model.DatasetRelatedItemTitle.query.get( + title["id"] + ) + update_title.update(title) + else: + title_add = model.DatasetRelatedItemTitle.from_data( + dataset_related_item_, title + ) + model.db.session.add(title_add) + + for identifier in i["identifiers"]: + if "id" in identifier and identifier["id"]: + update_identifier = ( + model.DatasetRelatedItemIdentifier.query.get( + identifier["id"] + ) + ) + update_identifier.update(identifier) + else: + identifier_add = model.DatasetRelatedItemIdentifier.from_data( + dataset_related_item_, identifier + ) + model.db.session.add(identifier_add) + contributors_ = i["contributors"] + creators_ = i["creators"] + for c in contributors_: + if "id" in c and c["id"]: + related_item_contributors_ = ( + model.DatasetRelatedItemContributor.query.get(c["id"]) + ) + related_item_contributors_.update(c) + model.db.session.add(related_item_contributors_) + else: + related_item_contributors_ = ( + model.DatasetRelatedItemContributor.from_data( + dataset_related_item_, c, False + ) + ) + model.db.session.add(related_item_contributors_) + + for c in creators_: + if "id" in c and c["id"]: + related_item_creators_ = ( + model.DatasetRelatedItemContributor.query.get(c["id"]) + ) + + related_item_creators_.update(c) + else: + related_item_creators_ = ( + model.DatasetRelatedItemContributor.from_data( + dataset_related_item_, c, True + ) + ) + model.db.session.add(related_item_creators_) + + # list_of_elements.append(dataset_related_item_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_related_item_ = model.DatasetRelatedItem.from_data(data_obj, i) + model.db.session.add(dataset_related_item_) + + for t in i["titles"]: + title_add = model.DatasetRelatedItemTitle.from_data( + dataset_related_item_, t + ) + model.db.session.add(title_add) + + for identifier in 
i["identifiers"]: + identifier_add = model.DatasetRelatedItemIdentifier.from_data( + dataset_related_item_, identifier + ) + model.db.session.add(identifier_add) + + contributors_ = i["contributors"] + creators_ = i["creators"] + for c in contributors_: + related_item_contributors_ = ( + model.DatasetRelatedItemContributor.from_data( + dataset_related_item_, c, False + ) + ) + model.db.session.add(related_item_contributors_) + + for c in creators_: + related_item_creators_ = ( + model.DatasetRelatedItemContributor.from_data( + dataset_related_item_, c, True + ) + ) + model.db.session.add(related_item_creators_) + + model.db.session.commit() + return [item.to_dict() for item in data_obj.dataset_related_item], 201 + + +@api.route( + "/study//dataset//metadata/related-item/" +) +class DatasetRelatedItemUpdate(Resource): + @api.doc("delete related item") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + related_item_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_related_item_ = model.DatasetRelatedItem.query.get(related_item_id) + + model.db.session.delete(dataset_related_item_) + model.db.session.commit() + + return 204 + + +@api.route( + "/study//dataset//metadata/related-item/" + "/contributor/" +) +class RelatedItemContributorsDelete(Resource): + @api.doc("delete related item contributors") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + related_item_id: int, # pylint: disable= unused-argument + contributor_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset 
metadata", 403 + dataset_contributors_ = model.DatasetRelatedItemContributor.query.get( + contributor_id + ) + model.db.session.delete(dataset_contributors_) + model.db.session.commit() + + return 204 + + +@api.route( + "/study//dataset//metadata/" + "related-item//title/" +) +class RelatedItemTitlesDelete(Resource): + @api.doc("delete related item title") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + related_item_id: int, # pylint: disable= unused-argument + title_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_title_ = model.DatasetRelatedItemTitle.query.get(title_id) + model.db.session.delete(dataset_title_) + model.db.session.commit() + return 204 + + +@api.route( + "/study//dataset//metadata/" + "related-item//identifier/" +) +class RelatedItemIdentifiersDelete(Resource): + @api.doc("delete related item identifier") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + related_item_id: int, # pylint: disable= unused-argument + identifier_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_identifier_ = model.DatasetRelatedItemIdentifier.query.get( + identifier_id + ) + model.db.session.delete(dataset_identifier_) + model.db.session.commit() + return 204 + + +@api.route( + "/study//dataset//metadata/related-item/" + "/creator/" # pylint: disable = line-too-long +) +class RelatedItemCreatorDelete(Resource): + @api.doc("delete related item creator") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( 
+ self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + related_item_id: int, # pylint: disable= unused-argument + creator_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_creator_ = model.DatasetRelatedItemContributor.query.get(creator_id) + model.db.session.delete(dataset_creator_) model.db.session.commit() - return dataset_related_item_.to_dict() - - @api.route( - "/study//dataset//metadata/related_item/" - ) - class DatasetRelatedItemUpdate(Resource): - def put(self, study_id: int, dataset_id: int, related_item_id: int): - data: Union[Any, dict] = request.json - dataset_related_item_ = model.DatasetRelatedItem.query.get(related_item_id) - dataset_related_item_.update(data) - model.db.session.commit() - return dataset_related_item_.to_dict() + return 204 diff --git a/apis/dataset_metadata/dataset_related_item_contributor.py b/apis/dataset_metadata/dataset_related_item_contributor.py deleted file mode 100644 index d7d2a09a..00000000 --- a/apis/dataset_metadata/dataset_related_item_contributor.py +++ /dev/null @@ -1,26 +0,0 @@ -from flask_restx import Resource - -import model -from apis.dataset_metadata_namespace import api - -# dataset_related_item_contributor = api.model( -# "DatasetRelatedItemContributor", -# { -# "id": fields.String(required=True), -# "type": fields.String(required=True), -# "relation_type": fields.String(required=True), -# -# }, -# ) - - -@api.route("/study//dataset//metadata/related_item_identifier") -class DatasetRelatedItemContributorResource(Resource): - @api.doc("related_item_identifier") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_related_item_contributor) - def get(self, study_id: int, dataset_id: int): - dataset_ = model.Dataset.query.get(dataset_id) - dataset_related_item_contributor_ = 
dataset_.dataset_related_item_contributor - return [d.to_dict() for d in dataset_related_item_contributor_] diff --git a/apis/dataset_metadata/dataset_related_item_identifier.py b/apis/dataset_metadata/dataset_related_item_identifier.py deleted file mode 100644 index d1f6695b..00000000 --- a/apis/dataset_metadata/dataset_related_item_identifier.py +++ /dev/null @@ -1,26 +0,0 @@ -from flask_restx import Resource - -import model -from apis.dataset_metadata_namespace import api - -# dataset_related_item_contributor = api.model( -# "DatasetRelatedItemContributor", -# { -# "id": fields.String(required=True), -# "type": fields.String(required=True), -# "relation_type": fields.String(required=True), -# -# }, -# ) - - -@api.route("/study//dataset//metadata/related_item_contributor") -class DatasetRelatedItemContributorResource(Resource): - @api.doc("related_item_contributor") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_related_item_contributor) - def get(self, study_id: int, dataset_id: int): - dataset_ = model.Dataset.query.get(dataset_id) - dataset_related_item_ = dataset_.dataset_related_item - return [d.to_dict() for d in dataset_related_item_] diff --git a/apis/dataset_metadata/dataset_related_item_other.py b/apis/dataset_metadata/dataset_related_item_other.py deleted file mode 100644 index 621a4920..00000000 --- a/apis/dataset_metadata/dataset_related_item_other.py +++ /dev/null @@ -1,27 +0,0 @@ -from flask_restx import Resource - -import model -from apis.dataset_metadata_namespace import api - -# dataset_related_item_contributor = api.model( -# "DatasetRelatedItemContributor", -# { -# "id": fields.String(required=True), -# "type": fields.String(required=True), -# "relation_type": fields.String(required=True), -# -# }, -# ) - - -@api.route("/study//dataset//metadata/related_item_other") -class DatasetRelatedItemContributorResource(Resource): - @api.doc("related_item_other") - @api.response(200, "Success") - 
@api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - # @api.marshal_with(dataset_related_item_contributor) - def get(self, study_id: int, dataset_id: int): - dataset_ = model.Dataset.query.get(dataset_id) - dataset_related_item_ = dataset_.dataset_related_item - return [d.to_dict() for d in dataset_related_item_] diff --git a/apis/dataset_metadata/dataset_related_item_title.py b/apis/dataset_metadata/dataset_related_item_title.py deleted file mode 100644 index 1bc6d243..00000000 --- a/apis/dataset_metadata/dataset_related_item_title.py +++ /dev/null @@ -1,27 +0,0 @@ -from flask_restx import Resource - -import model -from apis.dataset_metadata_namespace import api - -# dataset_related_item_contributor = api.model( -# "DatasetRelatedItemTitle", -# { -# "id": fields.String(required=True), -# "type": fields.String(required=True), -# "relation_type": fields.String(required=True), -# -# }, -# ) - - -@api.route("/study//dataset//metadata/related_item_title") -class DatasetRelatedItemTitleResource(Resource): - @api.doc("related_item_title") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - # @api.marshal_with(dataset_related_item_contributor) - def get(self, study_id: int, dataset_id: int): - dataset_ = model.Dataset.query.get(dataset_id) - dataset_related_item_title_ = dataset_.dataset_related_item_title - return [d.to_dict() for d in dataset_related_item_title_] diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index bdfa724e..350589be 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -4,6 +4,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_rights = api.model( @@ -25,23 +26,53 @@ class DatasetRightsResource(Resource): @api.response(400, "Validation Error") # 
@api.param("id", "The dataset identifier") @api.marshal_with(dataset_rights) - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_rights_ = dataset_.dataset_rights return [d.to_dict() for d in dataset_rights_] + @api.doc("update rights") + @api.response(200, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) - dataset_rights_ = model.DatasetRights.from_data(data_obj, data) - model.db.session.add(dataset_rights_) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_rights_ = model.DatasetRights.query.get(i["id"]) + if not dataset_rights_: + return f"Study link {i['id']} Id is not found", 404 + dataset_rights_.update(i) + list_of_elements.append(dataset_rights_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_rights_ = model.DatasetRights.from_data(data_obj, i) + model.db.session.add(dataset_rights_) + list_of_elements.append(dataset_rights_.to_dict()) model.db.session.commit() - return dataset_rights_.to_dict() - - @api.route("/study//dataset//metadata/rights/") - class DatasetRightsUpdate(Resource): - def put(self, study_id: int, dataset_id: int, rights_id: int): - dataset_rights_ = model.DatasetRights.query.get(rights_id) - dataset_rights_.update(request.json) - model.db.session.commit() - return dataset_rights_.to_dict() + return list_of_elements + + +@api.route("/study//dataset//metadata/rights/") +class DatasetRightsUpdate(Resource): + @api.doc("delete rights") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, 
# pylint: disable= unused-argument + rights_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_rights_ = model.DatasetRights.query.get(rights_id) + + model.db.session.delete(dataset_rights_) + model.db.session.commit() + + return 204 diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index b1fbfe9a..256aff56 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -4,6 +4,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_subject = api.model( @@ -26,23 +27,53 @@ class DatasetSubjectResource(Resource): @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_subject) - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_subject_ = dataset_.dataset_subject return [d.to_dict() for d in dataset_subject_] + @api.doc("update subject") + @api.response(200, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can't modify dataset metadata", 403 data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) - dataset_subject_ = model.DatasetSubject.from_data(data_obj, data) - model.db.session.add(dataset_subject_) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_subject_ = model.DatasetSubject.query.get(i["id"]) + if not dataset_subject_: + return f"Study link {i['id']} Id is not found", 404 + dataset_subject_.update(i) + 
list_of_elements.append(dataset_subject_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_subject_ = model.DatasetSubject.from_data(data_obj, i) + model.db.session.add(dataset_subject_) + list_of_elements.append(dataset_subject_.to_dict()) model.db.session.commit() - return dataset_subject_.to_dict() - - @api.route("/study//dataset//metadata/subject/") - class DatasetSubjectUpdate(Resource): - def put(self, study_id: int, dataset_id: int, subject_id: int): - dataset_subject_ = model.DatasetSubject.query.get(subject_id) - dataset_subject_.update(request.json) - model.db.session.commit() - return dataset_subject_.to_dict() + return list_of_elements + + +@api.route("/study//dataset//metadata/subject/") +class DatasetSubjectUpdate(Resource): + @api.doc("delete subject") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, # pylint: disable= unused-argument + dataset_id: int, # pylint: disable= unused-argument + subject_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can't make change in dataset metadata", 403 + dataset_subject_ = model.DatasetSubject.query.get(subject_id) + + model.db.session.delete(dataset_subject_) + model.db.session.commit() + + return 204 diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 63b7602e..f452506c 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -4,6 +4,7 @@ from flask_restx import Resource, fields import model +from apis.authentication import is_granted from apis.dataset_metadata_namespace import api dataset_title = api.model( @@ -23,21 +24,37 @@ class DatasetTitleResource(Resource): @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_title) - def get(self, study_id: int, dataset_id: int): + def get(self, study_id: int, 
dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_title_ = dataset_.dataset_title return [d.to_dict() for d in dataset_title_] + @api.doc("update title") + @api.response(200, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: if "id" in i and i["id"]: dataset_title_ = model.DatasetTitle.query.get(i["id"]) + # if dataset_title_.type == "MainTitle": + # return ( + # "Main Title type can not be modified", + # 403, + # dataset_title_.update(i) list_of_elements.append(dataset_title_.to_dict()) elif "id" not in i or not i["id"]: + if i["type"] == "MainTitle": + return ( + "MainTitle type can not be given", + 403, + ) dataset_title_ = model.DatasetTitle.from_data(data_obj, i) model.db.session.add(dataset_title_) list_of_elements.append(dataset_title_.to_dict()) @@ -46,8 +63,27 @@ def post(self, study_id: int, dataset_id: int): @api.route("/study//dataset//metadata/title/") class DatasetDescriptionUpdate(Resource): - def delete(self, study_id: int, dataset_id: int, title_id: int): + @api.doc("delete title") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + title_id: int, + ): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not make any change in dataset metadata", + 403, + ) dataset_title_ = model.DatasetTitle.query.get(title_id) + if dataset_title_.type == "MainTitle": + return ( + "MainTitle type can not be deleted", + 403, + ) model.db.session.delete(dataset_title_) 
model.db.session.commit() - return dataset_title_.to_dict() + return 204 diff --git a/apis/study.py b/apis/study.py index ce5fc4a9..f05e697e 100644 --- a/apis/study.py +++ b/apis/study.py @@ -120,7 +120,6 @@ def put(self, study_id: int): update_study = model.Study.query.get(study_id) if not is_granted("update_study", update_study): return "Access denied, you can not modify", 403 - update_study.update(request.json) model.db.session.commit() return update_study.to_dict() @@ -132,18 +131,16 @@ def delete(self, study_id: int): study = model.Study.query.get(study_id) if not is_granted("delete_study", study): return "Access denied, you can not delete study", 403 - for d in study.dataset: - for version in d.dataset_versions: - version.participants.clear() - for d in study.dataset: - for version in d.dataset_versions: - model.db.session.delete(version) - model.db.session.delete(d) - for p in study.participants: - model.db.session.delete(p) + # for d in study.dataset: + # for version in d.dataset_versions: + # version.participants.clear() + # for d in study.dataset: + # for version in d.dataset_versions: + # model.db.session.delete(version) + # model.db.session.delete(d) + # for p in study.participants: + # model.db.session.delete(p) model.db.session.delete(study) model.db.session.commit() - studies = model.Study.query.filter( - model.Study.study_contributors.any(model.User.id == g.user.id) - ).all() - return [s.to_dict() for s in studies], 201 + + return 204 diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 194aef99..b137b708 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -77,18 +77,19 @@ def post(self, study_id: int): except ValidationError as e: return e.message, 400 - data: typing.Union[dict, typing.Any] = request.json - study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not 
delete study", 403 + + data: typing.Union[dict, typing.Any] = request.json + identifiers = [i for i in study_obj.study_identification if not i.secondary] + primary_identifier = identifiers[0] if len(identifiers) else None + primary: dict = data["primary"] - primary["secondary"] = False - if "id" in primary and primary["id"]: - study_identification_ = model.StudyIdentification.query.get(primary["id"]) - study_identification_.update(primary) - elif "id" not in primary or not primary["id"]: + if primary_identifier: + primary_identifier.update(primary) + else: study_identification_ = model.StudyIdentification.from_data( study_obj, primary, False ) @@ -96,11 +97,10 @@ def post(self, study_id: int): for i in data["secondary"]: i["secondary"] = True - if "id" in i and i["id"]: study_identification_ = model.StudyIdentification.query.get(i["id"]) study_identification_.update(i) - elif "id" not in i or not i["id"]: + else: study_identification_ = model.StudyIdentification.from_data( study_obj, i, True ) @@ -108,9 +108,9 @@ def post(self, study_id: int): model.db.session.commit() - identifiers = model.Identifiers(study_obj) + final_identifiers = model.Identifiers(study_obj) - return identifiers.to_dict() + return final_identifiers.to_dict() @api.route("/study//metadata/identification/") class StudyIdentificationdUpdate(Resource): @@ -121,10 +121,10 @@ def delete(self, study_id: int, identification_id: int): study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): return "Access denied, you can not delete study", 403 + study_identification_ = model.StudyIdentification.query.get( identification_id ) - if not study_identification_.secondary: return 400, "primary identifier can not be deleted" diff --git a/app.py b/app.py index c77f6f23..fe69d442 100644 --- a/app.py +++ b/app.py @@ -44,8 +44,11 @@ def create_app(config_module=None): app.config.from_prefixed_env("FAIRHUB") # print(app.config) - if config.FAIRHUB_SECRET and len(config.FAIRHUB_SECRET) < 
14: - raise RuntimeError("secret key should contain at least 14 characters") + if config.FAIRHUB_SECRET: + if len(config.FAIRHUB_SECRET) < 32: + raise RuntimeError("FAIRHUB_SECRET must be at least 32 characters long") + else: + raise RuntimeError("FAIRHUB_SECRET not set") if "DATABASE_URL" in app.config: # if "TESTING" in app_config and app_config["TESTING"]: diff --git a/model/__init__.py b/model/__init__.py index 83ec4d68..5b91a29a 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,3 +1,4 @@ +from model.dataset_metadata.dataset_contributor import DatasetContributor from model.dataset_metadata.dataset_related_item import DatasetRelatedItem from model.dataset_metadata.dataset_related_item_contributor import ( DatasetRelatedItemContributor, @@ -9,28 +10,23 @@ from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle from .dataset import Dataset -from .dataset_contributor import DatasetContributor from .dataset_metadata.dataset_access import DatasetAccess from .dataset_metadata.dataset_alternate_identifier import DatasetAlternateIdentifier from .dataset_metadata.dataset_consent import DatasetConsent -from .dataset_metadata.dataset_contributor_affiliation import ( - DatasetContributorAffiliation, -) from .dataset_metadata.dataset_date import DatasetDate from .dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder -from .dataset_metadata.dataset_managing_organization import DatasetManagingOrganization from .dataset_metadata.dataset_other import DatasetOther from .dataset_metadata.dataset_readme import DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights from .dataset_metadata.dataset_subject import DatasetSubject from .dataset_metadata.dataset_title import DatasetTitle -from .dataset_versions import 
DatasetVersions from .db import db from .email_verification import EmailVerification from .invited_study_contributor import StudyInvitedContributor +from .notification import Notification from .participant import Participant from .study import Study, StudyException from .study_contributor import StudyContributor @@ -56,13 +52,13 @@ from .user import User from .user_details import UserDetails from .version import Version +from .version_readme import VersionReadme __all__ = [ "Study", "Dataset", - "DatasetVersions", - "Version", "Participant", + "Version", "db", "User", "DatasetContributor", @@ -70,13 +66,10 @@ "DatasetOther", "DatasetAccess", "DatasetConsent", - "DatasetContributorAffiliation", "DatasetDate", "DatasetDeIdentLevel", - "DatasetContributorAffiliation", "DatasetFunder", "DatasetAlternateIdentifier", - "DatasetManagingOrganization", "DatasetRights", "DatasetReadme", "DatasetRecordKeys", @@ -111,4 +104,6 @@ "EmailVerification", "TokenBlacklist", "UserDetails", + "Notification", + "VersionReadme", ] diff --git a/model/dataset.py b/model/dataset.py index db43a567..98cdc078 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -16,6 +16,16 @@ def __init__(self, study): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.dataset_access = model.DatasetAccess(self) + self.dataset_record_keys = model.DatasetRecordKeys(self) + self.dataset_de_ident_level = model.DatasetDeIdentLevel(self) + self.dataset_consent = model.DatasetConsent(self) + self.dataset_readme = model.DatasetReadme(self) + self.dataset_other = model.DatasetOther(self) + + self.dataset_title.append(model.DatasetTitle(self)) + self.dataset_description.append(model.DatasetDescription(self)) + __tablename__ = "dataset" id = db.Column(db.CHAR(36), primary_key=True) updated_on = db.Column(db.BigInteger, nullable=False) @@ -37,16 +47,17 @@ def __init__(self, study): lazy="dynamic", cascade="all, delete", ) - dataset_access = db.relationship( 
"DatasetAccess", back_populates="dataset", cascade="all, delete", + uselist=False, ) dataset_consent = db.relationship( "DatasetConsent", back_populates="dataset", cascade="all, delete", + uselist=False, ) dataset_date = db.relationship( "DatasetDate", @@ -55,6 +66,7 @@ def __init__(self, study): ) dataset_de_ident_level = db.relationship( "DatasetDeIdentLevel", + uselist=False, back_populates="dataset", cascade="all, delete", ) @@ -74,18 +86,30 @@ def __init__(self, study): back_populates="dataset", cascade="all, delete", ) - dataset_managing_organization = db.relationship( - "DatasetManagingOrganization", back_populates="dataset" + dataset_other = db.relationship( + "DatasetOther", back_populates="dataset", uselist=False, cascade="all, delete" + ) + dataset_readme = db.relationship( + "DatasetReadme", back_populates="dataset", uselist=False, cascade="all, delete" + ) + dataset_record_keys = db.relationship( + "DatasetRecordKeys", + back_populates="dataset", + uselist=False, + cascade="all, delete", ) - dataset_other = db.relationship("DatasetOther", back_populates="dataset") - dataset_readme = db.relationship("DatasetReadme", back_populates="dataset") - dataset_record_keys = db.relationship("DatasetRecordKeys", back_populates="dataset") dataset_related_item = db.relationship( - "DatasetRelatedItem", back_populates="dataset" + "DatasetRelatedItem", back_populates="dataset", cascade="all, delete" + ) + dataset_rights = db.relationship( + "DatasetRights", back_populates="dataset", cascade="all, delete" + ) + dataset_subject = db.relationship( + "DatasetSubject", back_populates="dataset", cascade="all, delete" + ) + dataset_title = db.relationship( + "DatasetTitle", back_populates="dataset", cascade="all, delete" ) - dataset_rights = db.relationship("DatasetRights", back_populates="dataset") - dataset_subject = db.relationship("DatasetSubject", back_populates="dataset") - dataset_title = db.relationship("DatasetTitle", back_populates="dataset") def to_dict(self): 
last_published = self.last_published() @@ -96,6 +120,7 @@ def to_dict(self): "created_at": self.created_at, # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published else None, + # "title": self.dataset_title.title if self.dataset_title else "" } def last_published(self): @@ -119,4 +144,6 @@ def from_data(study: Study): def update(self): """Creates a new dataset from a dictionary""" self.updated_on = datetime.datetime.now(timezone.utc).timestamp() - # self.dataset_versions = data["dataset_versions"] + + def touch_dataset(self): + self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() diff --git a/model/dataset_contributor.py b/model/dataset_contributor.py deleted file mode 100644 index a84bba52..00000000 --- a/model/dataset_contributor.py +++ /dev/null @@ -1,53 +0,0 @@ -import uuid - -from .db import db - - -class DatasetContributor(db.Model): # type: ignore - def __init__(self): - self.id = str(uuid.uuid4()) - - __tablename__ = "dataset_contributor" - id = db.Column(db.CHAR(36), primary_key=True) - first_name = db.Column(db.String, nullable=False) - last_name = db.Column(db.String, nullable=False) - name_type = db.Column(db.String, nullable=False) - name_identifier = db.Column(db.String, nullable=False) - name_identifier_scheme = db.Column(db.String, nullable=False) - name_identifier_scheme_uri = db.Column(db.String, nullable=False) - creator = db.Column(db.BOOLEAN, nullable=False) - contributor_type = db.Column(db.String, nullable=False) - - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) - dataset = db.relationship("Dataset", back_populates="dataset_contributors") - dataset_contributor_affiliation = db.relationship( - "DatasetContributorAffiliation", back_populates="dataset_contributors" - ) - - def to_dict(self): - return { - "id": self.id, - "first_name": self.first_name, - "last_name": self.last_name, - "name_type": self.name_type, - 
"name_identifier": self.name_identifier, - "name_identifier_scheme": self.name_identifier_scheme, - "name_identifier_scheme_uri": self.name_identifier_scheme_uri, - "creator": self.creator, - "contributor_type": self.contributor_type, - } - - @staticmethod - def from_data(data: dict): - dataset_contributor = DatasetContributor() - dataset_contributor.first_name = data["first_name"] - dataset_contributor.last_name = data["last_name"] - dataset_contributor.name_type = data["name_type"] - dataset_contributor.name_identifier = data["name_identifier"] - dataset_contributor.name_identifier_scheme = data["name_identifier_scheme"] - dataset_contributor.name_identifier_scheme_uri = data[ - "name_identifier_scheme_uri" - ] - dataset_contributor.creator = data["creator"] - dataset_contributor.contributor_type = data["contributor_type"] - return dataset_contributor diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 36f29310..577a5e14 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -1,5 +1,3 @@ -import uuid - from model import Dataset from ..db import db @@ -7,26 +5,29 @@ class DatasetAccess(db.Model): # type: ignore def __init__(self, dataset): - self.id = str(uuid.uuid4()) self.dataset = dataset + self.type = None + self.description = "" + self.url = "" + self.url_last_checked = None __tablename__ = "dataset_access" - id = db.Column(db.CHAR(36), primary_key=True) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) description = db.Column(db.String, nullable=False) url = db.Column(db.String, nullable=False) - url_last_checked = db.Column(db.String, nullable=False) + url_last_checked = db.Column(db.BigInteger, nullable=True) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) dataset = 
db.relationship("Dataset", back_populates="dataset_access") def to_dict(self): return { - "id": self.id, "type": self.type, "description": self.description, - "url": self.url, "url_last_checked": self.url_last_checked, + "url": self.url, } @staticmethod @@ -40,3 +41,4 @@ def update(self, data: dict): self.url = data["url"] self.url_last_checked = data["url_last_checked"] self.type = data["type"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index b8069d41..1b2ebb70 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -1,4 +1,6 @@ +import datetime import uuid +from datetime import timezone from ..db import db @@ -7,11 +9,13 @@ class DatasetAlternateIdentifier(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset_alternate_identifier" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) - identifier_type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_alternate_identifier") @@ -20,7 +24,8 @@ def to_dict(self): return { "id": self.id, "identifier": self.identifier, - "identifier_type": self.identifier_type, + "type": self.type, + "created_at": self.created_at, } @staticmethod @@ -31,4 +36,5 @@ def from_data(dataset, data: dict): def update(self, data: dict): self.identifier = data["identifier"] - self.identifier_type = data["identifier_type"] + self.type = data["type"] if "type" in data else "" + self.dataset.touch_dataset() diff --git 
a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 69d1e5ce..2258ea5d 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -1,17 +1,20 @@ -import uuid - from ..db import db class DatasetConsent(db.Model): # type: ignore def __init__(self, dataset): self.dataset = dataset - self.id = str(uuid.uuid4()) + self.type = None + self.noncommercial = True + self.geog_restrict = True + self.research_type = True + self.genetic_only = True + self.no_methods = True + self.details = "" __tablename__ = "dataset_consent" - id = db.Column(db.CHAR(36), primary_key=True) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) noncommercial = db.Column(db.BOOLEAN, nullable=False) geog_restrict = db.Column(db.BOOLEAN, nullable=False) research_type = db.Column(db.BOOLEAN, nullable=False) @@ -19,12 +22,13 @@ def __init__(self, dataset): no_methods = db.Column(db.BOOLEAN, nullable=False) details = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) dataset = db.relationship("Dataset", back_populates="dataset_consent") def to_dict(self): return { - "id": self.id, "type": self.type, "noncommercial": self.noncommercial, "geog_restrict": self.geog_restrict, @@ -48,3 +52,4 @@ def update(self, data: dict): self.genetic_only = data["genetic_only"] self.no_methods = data["no_methods"] self.details = data["details"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py new file mode 100644 index 00000000..6d4f54d7 --- /dev/null +++ b/model/dataset_metadata/dataset_contributor.py @@ -0,0 +1,58 @@ +import datetime +import uuid +from datetime import timezone + +from model.db import db + + +class 
DatasetContributor(db.Model): # type: ignore + def __init__(self, dataset): + self.id = str(uuid.uuid4()) + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.dataset = dataset + + __tablename__ = "dataset_contributor" + id = db.Column(db.CHAR(36), primary_key=True) + name = db.Column(db.String, nullable=False) + name_type = db.Column(db.String, nullable=True) + name_identifier = db.Column(db.String, nullable=False) + name_identifier_scheme = db.Column(db.String, nullable=False) + name_identifier_scheme_uri = db.Column(db.String, nullable=False) + creator = db.Column(db.BOOLEAN, nullable=False) + contributor_type = db.Column(db.String, nullable=True) + affiliations = db.Column(db.JSON, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset = db.relationship("Dataset", back_populates="dataset_contributors") + + def to_dict(self): + return { + "id": self.id, + "name": self.name, + "name_type": self.name_type, + "name_identifier": self.name_identifier, + "name_identifier_scheme": self.name_identifier_scheme, + "name_identifier_scheme_uri": self.name_identifier_scheme_uri, + "creator": self.creator, + "contributor_type": self.contributor_type, + "affiliations": self.affiliations, + "created_at": self.created_at, + } + + @staticmethod + def from_data(dataset, data: dict): + dataset_contributor = DatasetContributor(dataset) + dataset_contributor.update(data) + return dataset_contributor + + def update(self, data: dict): + self.name = data["name"] + self.name_type = data["name_type"] + self.name_identifier = data["name_identifier"] + self.name_identifier_scheme = data["name_identifier_scheme"] + self.name_identifier_scheme_uri = data["name_identifier_scheme_uri"] + self.creator = data["creator"] + self.contributor_type = data["contributor_type"] + self.affiliations = data["affiliations"] + self.dataset.touch_dataset() diff --git 
a/model/dataset_metadata/dataset_contributor_affiliation.py b/model/dataset_metadata/dataset_contributor_affiliation.py index dec9a7c6..045cfc55 100644 --- a/model/dataset_metadata/dataset_contributor_affiliation.py +++ b/model/dataset_metadata/dataset_contributor_affiliation.py @@ -1,4 +1,6 @@ +import datetime import uuid +from datetime import timezone from ..db import db @@ -7,16 +9,19 @@ class DatasetContributorAffiliation(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset_contributor_affiliation" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) identifier_scheme = db.Column(db.String, nullable=False) identifier_scheme_uri = db.Column(db.String, nullable=False) - dataset_contributors = db.relationship( - "DatasetContributor", back_populates="dataset_contributor_affiliation" - ) - dataset_contributor_id = db.Column( + created_at = db.Column(db.BigInteger, nullable=False) + + # dataset_contributors = db.relationship( + # "DatasetContributor", back_populates="dataset_contributor_affiliation" + # ) + contributor_id = db.Column( db.String, db.ForeignKey("dataset_contributor.id"), nullable=False ) @@ -26,6 +31,7 @@ def to_dict(self): "identifier": self.identifier, "identifier_scheme": self.identifier_scheme, "identifier_scheme_uri": self.identifier_scheme_uri, + "created_at": self.created_at, } @staticmethod @@ -38,3 +44,4 @@ def update(self, data: dict): self.identifier = data["identifier"] self.identifier_scheme = data["identifier_scheme"] self.identifier_scheme_uri = data["identifier_scheme_uri"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index d8f9a3f3..c58a1741 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -1,4 +1,6 @@ +import datetime 
import uuid +from datetime import timezone from ..db import db @@ -7,12 +9,15 @@ class DatasetDate(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset_date" id = db.Column(db.CHAR(36), primary_key=True) - date = db.Column(db.String, nullable=False) - date_type = db.Column(db.String, nullable=False) - data_information = db.Column(db.String, nullable=False) + + date = db.Column(db.BigInteger, nullable=True) + type = db.Column(db.String, nullable=True) + information = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_date") @@ -21,8 +26,9 @@ def to_dict(self): return { "id": self.id, "date": self.date, - "date_type": self.date_type, - "data_information": self.data_information, + "type": self.type, + "information": self.information, + "created_at": self.created_at, } @staticmethod @@ -33,5 +39,6 @@ def from_data(dataset, data: dict): def update(self, data: dict): self.date = data["date"] - self.date_type = data["date_type"] - self.data_information = data["data_information"] + self.type = data["type"] + self.information = data["information"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index 1f63d2bc..4d8e1cc1 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -1,17 +1,20 @@ -import uuid - from ..db import db class DatasetDeIdentLevel(db.Model): # type: ignore def __init__(self, dataset): - self.id = str(uuid.uuid4()) self.dataset = dataset + self.type = None + self.direct = False + self.hipaa = False + self.dates = False + self.nonarr = False + self.k_anon = False + self.details 
= "" __tablename__ = "dataset_de_ident_level" - id = db.Column(db.CHAR(36), primary_key=True) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) direct = db.Column(db.BOOLEAN, nullable=False) hipaa = db.Column(db.BOOLEAN, nullable=False) dates = db.Column(db.BOOLEAN, nullable=False) @@ -19,12 +22,13 @@ def __init__(self, dataset): k_anon = db.Column(db.BOOLEAN, nullable=False) details = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) dataset = db.relationship("Dataset", back_populates="dataset_de_ident_level") def to_dict(self): return { - "id": self.id, "type": self.type, "direct": self.direct, "hipaa": self.hipaa, @@ -48,3 +52,4 @@ def update(self, data: dict): self.nonarr = data["nonarr"] self.k_anon = data["k_anon"] self.details = data["details"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index e07504ec..97f3a8a2 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -1,4 +1,6 @@ +import datetime import uuid +from datetime import timezone from ..db import db @@ -7,11 +9,15 @@ class DatasetDescription(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.description = "" + self.type = "Abstract" __tablename__ = "dataset_description" id = db.Column(db.CHAR(36), primary_key=True) description = db.Column(db.String, nullable=False) - description_type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset 
= db.relationship("Dataset", back_populates="dataset_description") @@ -20,7 +26,8 @@ def to_dict(self): return { "id": self.id, "description": self.description, - "description_type": self.description_type, + "type": self.type, + "created_at": self.created_at, } @staticmethod @@ -31,4 +38,5 @@ def from_data(dataset, data: dict): def update(self, data: dict): self.description = data["description"] - self.description_type = data["description_type"] + self.type = data["type"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index b4c96a4c..90c45551 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -1,4 +1,6 @@ +import datetime import uuid +from datetime import timezone from ..db import db @@ -7,16 +9,18 @@ class DatasetFunder(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset_funder" id = db.Column(db.CHAR(36), primary_key=True) name = db.Column(db.String, nullable=False) identifier = db.Column(db.String, nullable=False) - identifier_type = db.Column(db.String, nullable=False) + identifier_type = db.Column(db.String, nullable=True) identifier_scheme_uri = db.Column(db.String, nullable=False) award_number = db.Column(db.String, nullable=False) award_uri = db.Column(db.String, nullable=False) award_title = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_funder") @@ -31,6 +35,7 @@ def to_dict(self): "award_number": self.award_number, "award_uri": self.award_uri, "award_title": self.award_title, + "created_at": self.created_at, } @staticmethod @@ -47,3 +52,4 @@ def update(self, data: dict): self.award_number = 
data["award_number"] self.award_uri = data["award_uri"] self.award_title = data["award_title"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py deleted file mode 100644 index 76548f26..00000000 --- a/model/dataset_metadata/dataset_managing_organization.py +++ /dev/null @@ -1,35 +0,0 @@ -import uuid - -from ..db import db - - -class DatasetManagingOrganization(db.Model): # type: ignore - def __init__(self, dataset): - self.id = str(uuid.uuid4()) - self.dataset = dataset - - __tablename__ = "dataset_managing_organization" - id = db.Column(db.CHAR(36), primary_key=True) - - name = db.Column(db.String, nullable=False) - ror_id = db.Column(db.String, nullable=False) - - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) - dataset = db.relationship("Dataset", back_populates="dataset_managing_organization") - - def to_dict(self): - return { - "id": self.id, - "name": self.name, - "ror_id": self.ror_id, - } - - @staticmethod - def from_data(dataset, data: dict): - dataset_managing_organization = DatasetManagingOrganization(dataset) - dataset_managing_organization.update(data) - return dataset_managing_organization - - def update(self, data: dict): - self.name = data["name"] - self.ror_id = data["ror_id"] diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index baafeba5..00694625 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -1,5 +1,3 @@ -import uuid - from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -8,31 +6,42 @@ class DatasetOther(db.Model): # type: ignore def __init__(self, dataset): - self.id = str(uuid.uuid4()) self.dataset = dataset + self.resource_type = "" + self.language = None + self.managing_organization_name = "" + self.managing_organization_ror_id = "" + self.size = "" + 
self.standards_followed = "" + self.acknowledgement = "" + self.publisher = "" __tablename__ = "dataset_other" - id = db.Column(db.CHAR(36), primary_key=True) - language = db.Column(db.String, nullable=False) + resource_type = db.Column(db.String, nullable=False) + language = db.Column(db.String, nullable=True) managing_organization_name = db.Column(db.String, nullable=False) managing_organization_ror_id = db.Column(db.String, nullable=False) size = db.Column(ARRAY(String), nullable=False) standards_followed = db.Column(db.String, nullable=False) acknowledgement = db.Column(db.String, nullable=False) + publisher = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) dataset = db.relationship("Dataset", back_populates="dataset_other") def to_dict(self): return { - "id": self.id, "language": self.language, "managing_organization_name": self.managing_organization_name, "managing_organization_ror_id": self.managing_organization_ror_id, "standards_followed": self.managing_organization_ror_id, "acknowledgement": self.acknowledgement, "size": self.size, + "publisher": self.publisher, + "resource_type": self.resource_type, } @staticmethod @@ -42,9 +51,20 @@ def from_data(dataset, data: dict): return dataset_other def update(self, data: dict): - self.language = data["language"] - self.managing_organization_name = data["managing_organization_name"] - self.managing_organization_ror_id = data["managing_organization_ror_id"] - self.size = data["size"] - self.acknowledgement = data["acknowledgement"] - self.standards_followed = data["standards_followed"] + if "language" in data: + self.language = data["language"] + if "managing_organization_name" in data: + self.managing_organization_name = data["managing_organization_name"] + if "managing_organization_ror_id" in data: + self.managing_organization_ror_id = 
data["managing_organization_ror_id"] + if "size" in data: + self.size = data["size"] + if "acknowledgement" in data: + self.acknowledgement = data["acknowledgement"] + if "standards_followed" in data: + self.standards_followed = data["standards_followed"] + if "publisher" in data: + self.publisher = data["publisher"] + if "resource_type" in data: + self.resource_type = data["resource_type"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index 889d73d2..b443c95b 100644 --- a/model/dataset_metadata/dataset_readme.py +++ b/model/dataset_metadata/dataset_readme.py @@ -1,23 +1,21 @@ -import uuid - from ..db import db class DatasetReadme(db.Model): # type: ignore def __init__(self, dataset): - self.id = str(uuid.uuid4()) self.dataset = dataset + self.content = "" __tablename__ = "dataset_readme" - id = db.Column(db.CHAR(36), primary_key=True) content = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) dataset = db.relationship("Dataset", back_populates="dataset_readme") def to_dict(self): return { - "id": self.id, "content": self.content, } @@ -29,3 +27,4 @@ def from_data(dataset, data: dict): def update(self, data: dict): self.content = data["content"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 0b48818f..993af3f2 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -1,24 +1,23 @@ -import uuid - from ..db import db class DatasetRecordKeys(db.Model): # type: ignore def __init__(self, dataset): - self.id = str(uuid.uuid4()) self.dataset = dataset + self.key_type = None + self.key_details = "" __tablename__ = "dataset_record_keys" - id = db.Column(db.CHAR(36), 
primary_key=True) - key_type = db.Column(db.String, nullable=False) + key_type = db.Column(db.String, nullable=True) key_details = db.Column(db.String, nullable=False) - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) dataset = db.relationship("Dataset", back_populates="dataset_record_keys") def to_dict(self): return { - "id": self.id, "key_type": self.key_type, "key_details": self.key_details, } @@ -30,5 +29,6 @@ def from_data(dataset, data: dict): return dataset_record_keys def update(self, data: dict): - self.key_type = data["key_type"] - self.key_details = data["key_details"] + self.key_type = data["type"] + self.key_details = data["details"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index 477c1ab5..579bed63 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -1,4 +1,8 @@ +import datetime import uuid +from datetime import timezone + +import model from ..db import db @@ -7,32 +11,90 @@ class DatasetRelatedItem(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.dataset_related_item_other = model.DatasetRelatedItemOther(self) __tablename__ = "dataset_related_item" + id = db.Column(db.CHAR(36), primary_key=True) - type = db.Column(db.String, nullable=False) - relation_type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) + relation_type = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_related_item") + 
dataset_related_item_contributor = db.relationship( - "DatasetRelatedItemContributor", back_populates="dataset_related_item" + "DatasetRelatedItemContributor", + back_populates="dataset_related_item", + cascade="all, delete", ) dataset_related_item_identifier = db.relationship( - "DatasetRelatedItemIdentifier", back_populates="dataset_related_item" + "DatasetRelatedItemIdentifier", + back_populates="dataset_related_item", + cascade="all, delete", ) dataset_related_item_other = db.relationship( - "DatasetRelatedItemOther", back_populates="dataset_related_item" + "DatasetRelatedItemOther", + back_populates="dataset_related_item", + uselist=False, + cascade="all, delete", ) dataset_related_item_title = db.relationship( - "DatasetRelatedItemTitle", back_populates="dataset_related_item" + "DatasetRelatedItemTitle", + back_populates="dataset_related_item", + cascade="all, delete", ) def to_dict(self): + sorted_contributors = sorted( + self.dataset_related_item_contributor, + key=lambda creator: creator.created_at, + ) + creators = [c for c in sorted_contributors if c.creator] + + contributors = [c for c in sorted_contributors if not c.creator] return { "id": self.id, "type": self.type, "relation_type": self.relation_type, + "created_at": self.created_at, + "titles": [ + i.to_dict() for i in self.dataset_related_item_title # type: ignore + ], + "creators": [c.to_dict() for c in creators], + "contributors": [c.to_dict() for c in contributors], + "publication_year": self.dataset_related_item_other.publication_year + if self.dataset_related_item_other + else None, + "volume": self.dataset_related_item_other.volume + if self.dataset_related_item_other + else None, + "issue": self.dataset_related_item_other.issue + if self.dataset_related_item_other + else None, + "number_value": self.dataset_related_item_other.number_value + if self.dataset_related_item_other + else None, + "number_type": self.dataset_related_item_other.number_type + if self.dataset_related_item_other + else 
None, + "first_page": self.dataset_related_item_other.first_page + if self.dataset_related_item_other + else None, + "last_page": self.dataset_related_item_other.last_page + if self.dataset_related_item_other + else None, + "publisher": self.dataset_related_item_other.publisher + if self.dataset_related_item_other + else None, + "edition": self.dataset_related_item_other.edition + if self.dataset_related_item_other + else None, + "identifiers": [ + i.to_dict() + for i in self.dataset_related_item_identifier # type: ignore + ], } @staticmethod @@ -44,3 +106,5 @@ def from_data(dataset, data: dict): def update(self, data: dict): self.type = data["type"] self.relation_type = data["relation_type"] + self.dataset_related_item_other.update(data) + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index db3c3ae6..b38b3651 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -1,19 +1,24 @@ +import datetime import uuid +from datetime import timezone from ..db import db class DatasetRelatedItemContributor(db.Model): # type: ignore - def __init__(self, dataset): + def __init__(self, dataset_related_item, creator): self.id = str(uuid.uuid4()) - self.dataset = dataset + self.dataset_related_item = dataset_related_item + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.creator = creator __tablename__ = "dataset_related_item_contributor" id = db.Column(db.CHAR(36), primary_key=True) name = db.Column(db.String, nullable=False) - name_type = db.Column(db.String, nullable=False) + name_type = db.Column(db.String, nullable=True) creator = db.Column(db.BOOLEAN, nullable=False) - contributor_type = db.Column(db.String, nullable=False) + contributor_type = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) 
dataset_related_item_id = db.Column( db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False @@ -27,18 +32,19 @@ def to_dict(self): "id": self.id, "name": self.name, "name_type": self.name_type, - "creator": self.creator, "contributor_type": self.contributor_type, + "created_at": self.created_at, } @staticmethod - def from_data(dataset, data: dict): - dataset_related_contributor = DatasetRelatedItemContributor(dataset) - dataset_related_contributor.update(data) - return dataset_related_contributor + def from_data(dataset_related_item, data: dict, creator): + contributor_ = DatasetRelatedItemContributor(dataset_related_item, creator) + contributor_.update(data) + return contributor_ def update(self, data: dict): - self.name = data["name"] - self.name_type = data["name_type"] - self.creator = data["creator"] - self.contributor_type = data["contributor_type"] + self.name = data["name"] if "name" in data else "" + self.name_type = data["name_type"] if "name_type" in data else None + self.contributor_type = ( + data["contributor_type"] if "contributor_type" in data else None + ) diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 6a854900..48b2e548 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -1,19 +1,24 @@ +import datetime import uuid +from datetime import timezone from ..db import db class DatasetRelatedItemIdentifier(db.Model): # type: ignore - def __init__(self): + def __init__(self, dataset_related_item): self.id = str(uuid.uuid4()) + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.dataset_related_item = dataset_related_item __tablename__ = "dataset_related_item_identifier" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) - type = db.Column(db.String, nullable=False) - metadata_scheme = 
db.Column(db.String, nullable=False) - scheme_uri = db.Column(db.String, nullable=False) - scheme_type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) + metadata_scheme = db.Column(db.String, nullable=True) + scheme_uri = db.Column(db.String, nullable=True) + scheme_type = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) dataset_related_item_id = db.Column( db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False @@ -30,14 +35,20 @@ def to_dict(self): "metadata_scheme": self.metadata_scheme, "scheme_uri": self.scheme_uri, "scheme_type": self.scheme_type, + "created_at": self.created_at, } @staticmethod - def from_data(data: dict): - dataset_related_item_identifier = DatasetRelatedItemIdentifier() - dataset_related_item_identifier.identifier = data["identifier"] - dataset_related_item_identifier.type = data["type"] - dataset_related_item_identifier.metadata_scheme = data["metadata_scheme"] - dataset_related_item_identifier.scheme_uri = data["scheme_uri"] - dataset_related_item_identifier.scheme_type = data["scheme_type"] - return dataset_related_item_identifier + def from_data(dataset_related_item, data: dict): + identifier_ = DatasetRelatedItemIdentifier(dataset_related_item) + identifier_.update(data) + return identifier_ + + def update(self, data: dict): + self.identifier = data["identifier"] if "identifier" in data else "" + self.type = data["type"] if "type" in data else None + self.metadata_scheme = ( + data["metadata_scheme"] if "metadata_scheme" in data else "" + ) + self.scheme_uri = data["scheme_uri"] if "scheme_uri" in data else "" + self.scheme_type = data["scheme_type"] if "scheme_type" in data else "" diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py index aba272d3..39cbe02a 100644 --- a/model/dataset_metadata/dataset_related_item_other.py +++ 
b/model/dataset_metadata/dataset_related_item_other.py @@ -1,27 +1,35 @@ -import uuid - from ..db import db class DatasetRelatedItemOther(db.Model): # type: ignore - def __init__(self, dataset): - self.id = str(uuid.uuid4()) - self.dataset = dataset + def __init__(self, dataset_related_item): + self.dataset_related_item = dataset_related_item + self.publication_year = None + self.volume = "" + self.issue = "" + self.number_value = "" + self.number_type = None + self.first_page = "" + self.last_page = "" + self.publisher = "" + self.edition = "" __tablename__ = "dataset_related_item_other" - id = db.Column(db.CHAR(36), primary_key=True) - publication_year = db.Column(db.String, nullable=False) + publication_year = db.Column(db.BigInteger, nullable=True) volume = db.Column(db.String, nullable=False) issue = db.Column(db.String, nullable=False) number_value = db.Column(db.String, nullable=False) - number_type = db.Column(db.String, nullable=False) + number_type = db.Column(db.String, nullable=True) first_page = db.Column(db.String, nullable=False) - last_page = db.Column(db.BOOLEAN, nullable=False) + last_page = db.Column(db.String, nullable=False) publisher = db.Column(db.String, nullable=False) edition = db.Column(db.String, nullable=False) dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False + db.CHAR(36), + db.ForeignKey("dataset_related_item.id"), + primary_key=True, + nullable=False, ) dataset_related_item = db.relationship( "DatasetRelatedItem", back_populates="dataset_related_item_other" @@ -29,7 +37,6 @@ def __init__(self, dataset): def to_dict(self): return { - "id": self.id, "publication_year": self.publication_year, "volume": self.volume, "issue": self.issue, @@ -39,22 +46,23 @@ def to_dict(self): "last_page": self.last_page, "publisher": self.publisher, "edition": self.edition, - "scheme_type": self.scheme_type, } @staticmethod - def from_data(dataset, data: dict): - dataset_related_item_other = 
DatasetRelatedItemOther(dataset) + def from_data(dataset_related_item, data: dict): + dataset_related_item_other = DatasetRelatedItemOther(dataset_related_item) dataset_related_item_other.update(data) return dataset_related_item_other def update(self, data: dict): - self.publication_year = data["publication_year"] - self.volume = data["volume"] - self.issue = data["issue"] - self.number_value = data["number_value"] - self.number_type = data["number_type"] - self.first_page = data["first_page"] - self.last_page = data["last_page"] - self.publisher = data["publisher"] - self.edition = data["edition"] + self.publication_year = ( + data["publication_year"] if "publication_year" in data else None + ) + self.volume = data["volume"] if "volume" in data else "" + self.issue = data["issue"] if "issue" in data else "" + self.number_value = data["number_value"] if "number_value" in data else "" + self.number_type = data["number_type"] if "number_type" in data else None + self.first_page = data["first_page"] if "first_page" in data else "" + self.last_page = data["last_page"] if "last_page" in data else "" + self.publisher = data["publisher"] if "publisher" in data else "" + self.edition = data["edition"] if "edition" in data else "" diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index a2355673..2d4ea08d 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -1,17 +1,21 @@ +import datetime import uuid +from datetime import timezone from ..db import db class DatasetRelatedItemTitle(db.Model): # type: ignore - def __init__(self, dataset): + def __init__(self, dataset_related_item): self.id = str(uuid.uuid4()) - self.dataset = dataset + self.dataset_related_item = dataset_related_item + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset_related_item_title" id = db.Column(db.CHAR(36), 
primary_key=True) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) title = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) dataset_related_item_id = db.Column( db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False @@ -25,11 +29,12 @@ def to_dict(self): "id": self.id, "type": self.type, "title": self.title, + "created_at": self.created_at, } @staticmethod - def from_data(dataset, data: dict): - dataset_related_item_title = DatasetRelatedItemTitle(dataset) + def from_data(dataset_related_item, data: dict): + dataset_related_item_title = DatasetRelatedItemTitle(dataset_related_item) dataset_related_item_title.update(data) return dataset_related_item_title diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 2b494c6d..27ad1a63 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -1,4 +1,6 @@ +import datetime import uuid +from datetime import timezone from ..db import db @@ -7,6 +9,7 @@ class DatasetRights(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset_rights" id = db.Column(db.CHAR(36), primary_key=True) @@ -15,6 +18,7 @@ def __init__(self, dataset): uri = db.Column(db.String, nullable=False) identifier = db.Column(db.String, nullable=False) identifier_scheme = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_rights") @@ -26,6 +30,7 @@ def to_dict(self): "uri": self.uri, "identifier": self.identifier, "identifier_scheme": self.identifier_scheme, + "created_at": self.created_at, } @staticmethod @@ -39,3 +44,4 @@ def update(self, data: 
dict): self.uri = data["uri"] self.identifier = data["identifier"] self.identifier_scheme = data["identifier_scheme"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 12d7d24d..43ea560e 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -1,4 +1,6 @@ +import datetime import uuid +from datetime import timezone from ..db import db @@ -7,6 +9,7 @@ class DatasetSubject(db.Model): # type: ignore def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "dataset_subject" id = db.Column(db.CHAR(36), primary_key=True) @@ -16,6 +19,7 @@ def __init__(self, dataset): scheme_uri = db.Column(db.String, nullable=False) value_uri = db.Column(db.String, nullable=False) classification_code = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_subject") @@ -28,6 +32,7 @@ def to_dict(self): "scheme_uri": self.scheme_uri, "value_uri": self.value_uri, "classification_code": self.classification_code, + "created_at": self.created_at, } @staticmethod @@ -42,3 +47,4 @@ def update(self, data: dict): self.scheme_uri = data["scheme_uri"] self.value_uri = data["value_uri"] self.classification_code = data["classification_code"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index b120cbcc..eff54c9f 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -1,4 +1,6 @@ +import datetime import uuid +from datetime import timezone from ..db import db @@ -7,11 +9,15 @@ class DatasetTitle(db.Model): # type: ignore def __init__(self, dataset): self.id = 
str(uuid.uuid4()) self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.title = "" + self.type = "MainTitle" __tablename__ = "dataset_title" id = db.Column(db.CHAR(36), primary_key=True) title = db.Column(db.String, nullable=False) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_title") dataset_id = db.Column(db.String, db.ForeignKey("dataset.id"), nullable=False) @@ -21,6 +27,7 @@ def to_dict(self): "id": self.id, "title": self.title, "type": self.type, + "created_at": self.created_at, } @staticmethod @@ -33,3 +40,4 @@ def from_data(dataset, data: dict): def update(self, data: dict): self.title = data["title"] self.type = data["type"] + self.dataset.touch_dataset() diff --git a/model/dataset_versions.py b/model/dataset_versions.py deleted file mode 100644 index 1bff5a82..00000000 --- a/model/dataset_versions.py +++ /dev/null @@ -1,35 +0,0 @@ -from .version import Version - - -class DatasetVersions: - def __init__( - self, - last_published: Version, - last_modified: Version, - id: str, # pylint: disable = redefined-builtin - ): - self.latest_version = last_modified.id - self.published_version = last_published.id - self.last_modified = last_modified.modified - self.last_published = last_published.modified - self.id = id - - def to_dict(self): - return { - "latest_version": self.latest_version, - "published_version": self.published_version, - "last_modified": self.last_modified, - "last_published": self.last_published, - "id": self.id, - } - - @staticmethod - def from_data(data: dict): - dataset_versions = DatasetVersions( - id=data["id"], - last_published=data["last_published"], - last_modified=data["last_modified"], - ) - dataset_versions.latest_version = data["latest_version"] - dataset_versions.published_version = data["published_version"] - return 
dataset_versions diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 0b360288..2bd8edab 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -35,4 +35,5 @@ def to_dict(self): "status": "invited", "role": self.permission, "email_address": self.email_address, + "token": self.token, } diff --git a/model/notification.py b/model/notification.py new file mode 100644 index 00000000..b123c188 --- /dev/null +++ b/model/notification.py @@ -0,0 +1,47 @@ +import datetime +import uuid +from datetime import timezone + +from .db import db + + +class Notification(db.Model): # type: ignore + def __init__(self): + self.id = str(uuid.uuid4()) + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + + __tablename__ = "notification" + id = db.Column(db.CHAR(36), primary_key=True) + title = db.Column(db.String, nullable=True) + message = db.Column(db.String, nullable=True) + type = db.Column(db.String, nullable=True) + target = db.Column(db.String, nullable=True) + read = db.Column(db.BOOLEAN, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) + + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + user = db.relationship("User", back_populates="notification") + + def to_dict(self): + return { + "id": self.id, + "title": self.title, + "message": self.message, + "type": self.type, + "target": self.target, + "read": self.read, + "created_at": self.created_at, + } + + @staticmethod + def from_data(data: dict): + user = Notification() + user.update(data) + return user + + def update(self, data: dict): + self.title = data["title"] + self.message = data["message"] + self.type = data["type"] + self.target = data["target"] + self.read = data["read"] diff --git a/model/participant.py b/model/participant.py index 669b1834..b4ba46a7 100644 --- a/model/participant.py +++ b/model/participant.py @@ -2,10 +2,9 @@ import uuid from datetime import timezone -import model 
- from .db import db from .study import Study +from .version import version_participants class Participant(db.Model): # type: ignore @@ -30,7 +29,7 @@ def __init__(self, study): dataset_versions = db.relationship( "Version", back_populates="participants", - secondary=model.version.version_participants, + secondary=version_participants, ) def to_dict(self): diff --git a/model/study.py b/model/study.py index a8546fff..a919bbae 100644 --- a/model/study.py +++ b/model/study.py @@ -8,8 +8,6 @@ from .db import db -# from datetime import datetime, timezone - class StudyException(Exception): pass @@ -28,6 +26,7 @@ def __init__(self): self.study_eligibility = model.StudyEligibility(self) self.study_ipdsharing = model.StudyIpdsharing(self) self.study_description = model.StudyDescription(self) + self.study_identification.append(model.StudyIdentification(self, False)) self.study_other = model.StudyOther(self) # self.study_contributors = model.StudyContributor(self) diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index 18b9e6b5..cf9e14fa 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -10,7 +10,7 @@ def __init__(self, study: Study): def to_dict(self): sorted_study_identifications = sorted( self.study.study_identification, - key=lambda identifier: identifier.created_at, + key=lambda i: i.created_at, ) return { "primary": [ @@ -26,12 +26,11 @@ def to_dict(self): ] ) != 0 # noqa: W503 - else [], + else None, "secondary": [ identifier.to_dict() for identifier in sorted_study_identifications if identifier.secondary ], } - # sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at, reverse=True) # return [s.to_dict() for s in sorted_study_reference] diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 2a6e8637..cd65d626 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py 
@@ -19,7 +19,7 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) url = db.Column(db.String, nullable=False) comment = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index a39f72e0..c13d8edf 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -1,5 +1,3 @@ -import uuid - from model import Study from ..db import db @@ -9,26 +7,26 @@ class StudyDescription(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study: Study): - self.id = str(uuid.uuid4()) self.study = study self.brief_summary = "" self.detailed_description = "" __tablename__ = "study_description" - id = db.Column(db.CHAR(36), primary_key=True) brief_summary = db.Column(db.String, nullable=False) detailed_description = db.Column(db.String, nullable=False) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, ) study = db.relationship("Study", back_populates="study_description") def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, "brief_summary": self.brief_summary, "detailed_description": self.detailed_description, } diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index d09eb5e2..585bf1f6 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -1,5 +1,3 @@ -import uuid - from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -12,42 +10,40 @@ class StudyDesign(db.Model): # type: ignore """A study is a collection of 
datasets and participants""" def __init__(self, study: Study): - self.id = str(uuid.uuid4()) self.study = study - self.design_allocation = "" - self.study_type = "" - self.design_intervention_model = "" + self.design_allocation = None + self.study_type = None + self.design_intervention_model = None self.design_intervention_model_description = "" - self.design_primary_purpose = "" - self.design_masking = "" - self.design_masking_description = "" + self.design_primary_purpose = None + self.design_masking = None + self.design_masking_description = None self.design_who_masked_list = [] - self.phase_list = [] - self.enrollment_count = 0 - self.enrollment_type = "" - self.number_arms = 0 - self.design_observational_model_list = [] - self.design_time_perspective_list = [] - self.bio_spec_retention = "" - self.bio_spec_description = "" - self.target_duration = "" - self.number_groups_cohorts = 0 + self.phase_list = None + self.enrollment_count = None + self.enrollment_type = None + self.number_arms = None + self.design_observational_model_list = None + self.design_time_perspective_list = None + self.bio_spec_retention = None + self.bio_spec_description = None + self.target_duration = None + self.number_groups_cohorts = None __tablename__ = "study_design" - id = db.Column(db.CHAR(36), primary_key=True) design_allocation = db.Column(db.String, nullable=True) - study_type = db.Column(db.String, nullable=False) + study_type = db.Column(db.String, nullable=True) design_intervention_model = db.Column(db.String, nullable=True) - design_intervention_model_description = db.Column(db.String, nullable=True) + design_intervention_model_description = db.Column(db.String, nullable=False) design_primary_purpose = db.Column(db.String, nullable=True) design_masking = db.Column(db.String, nullable=True) design_masking_description = db.Column(db.String, nullable=True) design_who_masked_list = db.Column(ARRAY(String), nullable=True) phase_list = db.Column(ARRAY(String), nullable=True) - 
enrollment_count = db.Column(db.Integer, nullable=False) - enrollment_type = db.Column(db.String, nullable=False) + enrollment_count = db.Column(db.Integer, nullable=True) + enrollment_type = db.Column(db.String, nullable=True) number_arms = db.Column(db.Integer, nullable=True) design_observational_model_list = db.Column(ARRAY(String), nullable=True) design_time_perspective_list = db.Column(ARRAY(String), nullable=True) @@ -57,14 +53,16 @@ def __init__(self, study: Study): number_groups_cohorts = db.Column(db.Integer, nullable=True) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, ) study = db.relationship("Study", back_populates="study_design") def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, "design_allocation": self.design_allocation, "study_type": self.study_type, "design_intervention_model": self.design_intervention_model, diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 886be8e8..752a69f4 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -1,5 +1,3 @@ -import uuid - from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -12,46 +10,46 @@ class StudyEligibility(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study: Study): - self.id = str(uuid.uuid4()) self.study = study - self.gender = "" - self.gender_based = "" + self.gender = None + self.gender_based = None self.gender_description = "" - self.minimum_age_value = 18 - self.maximum_age_value = 60 - self.minimum_age_unit = "" - self.maximum_age_unit = "" - self.healthy_volunteers = "" + self.minimum_age_value = None # 18 + self.maximum_age_value = None # 60 + self.minimum_age_unit = None + self.maximum_age_unit = None + 
self.healthy_volunteers = None self.inclusion_criteria = [] self.exclusion_criteria = [] self.study_population = "" - self.sampling_method = "" + self.sampling_method = None __tablename__ = "study_eligibility" - id = db.Column(db.CHAR(36), primary_key=True) - gender = db.Column(db.String, nullable=False) - gender_based = db.Column(db.String, nullable=False) + gender = db.Column(db.String, nullable=True) + gender_based = db.Column(db.String, nullable=True) gender_description = db.Column(db.String, nullable=False) - minimum_age_value = db.Column(db.Integer, nullable=False) - maximum_age_value = db.Column(db.Integer, nullable=False) - minimum_age_unit = db.Column(db.String, nullable=False) - maximum_age_unit = db.Column(db.String, nullable=False) + minimum_age_value = db.Column(db.Integer, nullable=True) + maximum_age_value = db.Column(db.Integer, nullable=True) + minimum_age_unit = db.Column(db.String, nullable=True) + maximum_age_unit = db.Column(db.String, nullable=True) healthy_volunteers = db.Column(db.String, nullable=True) inclusion_criteria = db.Column(ARRAY(String), nullable=False) exclusion_criteria = db.Column(ARRAY(String), nullable=False) - study_population = db.Column(db.String, nullable=True) + study_population = db.Column(db.String, nullable=False) sampling_method = db.Column(db.String, nullable=True) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, ) study = db.relationship("Study", back_populates="study_eligibility") def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, "gender": self.gender, "gender_based": self.gender_based, "gender_description": self.gender_description, diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 5f31affb..f6373bf1 100644 --- a/model/study_metadata/study_identification.py +++ 
b/model/study_metadata/study_identification.py @@ -13,11 +13,14 @@ def __init__(self, study, secondary): self.study = study self.secondary = secondary self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.identifier = "" + self.identifier_domain = "" + self.identifier_link = "" __tablename__ = "study_identification" id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) - identifier_type = db.Column(db.String, nullable=False) + identifier_type = db.Column(db.String, nullable=True) identifier_domain = db.Column(db.String, nullable=False) identifier_link = db.Column(db.String, nullable=False) secondary = db.Column(db.BOOLEAN, nullable=False) diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 7ca82b89..0230b832 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -21,7 +21,7 @@ def __init__(self, study): __tablename__ = "study_intervention" id = db.Column(db.CHAR(36), primary_key=True) - type = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=True) name = db.Column(db.String, nullable=False) description = db.Column(db.String, nullable=False) arm_group_label_list = db.Column(ARRAY(String), nullable=False) diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 21d18ef4..9d7868bc 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -1,5 +1,3 @@ -import uuid - from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -12,9 +10,8 @@ class StudyIpdsharing(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): - self.id = str(uuid.uuid4()) self.study = study - self.ipd_sharing = "" + self.ipd_sharing = None self.ipd_sharing_description = "" self.ipd_sharing_info_type_list = [] self.ipd_sharing_time_frame 
= "" @@ -23,8 +20,7 @@ def __init__(self, study): __tablename__ = "study_ipdsharing" - id = db.Column(db.CHAR(36), primary_key=True) - ipd_sharing = db.Column(db.String, nullable=False) + ipd_sharing = db.Column(db.String, nullable=True) ipd_sharing_description = db.Column(db.String, nullable=False) ipd_sharing_info_type_list = db.Column(ARRAY(String), nullable=False) ipd_sharing_time_frame = db.Column(db.String, nullable=False) @@ -32,14 +28,16 @@ def __init__(self, study): ipd_sharing_url = db.Column(db.String, nullable=False) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, ) study = db.relationship("Study", back_populates="study_ipdsharing") def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, "ipd_sharing": self.ipd_sharing, "ipd_sharing_description": self.ipd_sharing_description, "ipd_sharing_info_type_list": self.ipd_sharing_info_type_list, diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 808a1401..06e640d6 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -19,7 +19,7 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) facility = db.Column(db.String, nullable=False) - status = db.Column(db.String, nullable=False) + status = db.Column(db.String, nullable=True) city = db.Column(db.String, nullable=False) state = db.Column(db.String, nullable=False) zip = db.Column(db.String, nullable=False) diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 4bd272aa..d23acac7 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -1,5 +1,3 @@ -import uuid - from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -12,7 +10,6 @@ class StudyOther(db.Model): # type: ignore 
"""A study is a collection of datasets and participants""" def __init__(self, study): - self.id = str(uuid.uuid4()) self.study = study self.oversight_has_dmc = False self.conditions = [] @@ -21,21 +18,22 @@ def __init__(self, study): __tablename__ = "study_other" - id = db.Column(db.CHAR(36), primary_key=True) oversight_has_dmc = db.Column(db.BOOLEAN, nullable=False) conditions = db.Column(ARRAY(String), nullable=False) keywords = db.Column(ARRAY(String), nullable=False) size = db.Column(db.BigInteger, nullable=False) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, ) study = db.relationship("Study", back_populates="study_other") def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, "oversight_has_dmc": self.oversight_has_dmc, "conditions": self.conditions, "keywords": self.keywords, diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 0035fe2e..e173cd0b 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -20,7 +20,7 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) name = db.Column(db.String, nullable=False) affiliation = db.Column(db.String, nullable=False) - role = db.Column(db.String, nullable=False) + role = db.Column(db.String, nullable=True) created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column( diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index d64706c4..113af7b0 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -19,7 +19,7 @@ def __init__(self, study): id = db.Column(db.CHAR(36), primary_key=True) identifier = db.Column(db.String, nullable=False) - type = db.Column(db.String, nullable=False) + type = 
db.Column(db.String, nullable=True) citation = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 1ca59401..a8ab84f5 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -1,5 +1,3 @@ -import uuid - from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY @@ -12,9 +10,8 @@ class StudySponsorsCollaborators(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): - self.id = str(uuid.uuid4()) self.study = study - self.responsible_party_type = "" + self.responsible_party_type = None self.responsible_party_investigator_name = "" self.responsible_party_investigator_title = "" self.responsible_party_investigator_affiliation = "" @@ -23,8 +20,7 @@ def __init__(self, study): __tablename__ = "study_sponsors_collaborators" - id = db.Column(db.CHAR(36), primary_key=True) - responsible_party_type = db.Column(db.String, nullable=False) + responsible_party_type = db.Column(db.String, nullable=True) responsible_party_investigator_name = db.Column(db.String, nullable=False) responsible_party_investigator_title = db.Column(db.String, nullable=False) responsible_party_investigator_affiliation = db.Column(db.String, nullable=False) @@ -32,14 +28,16 @@ def __init__(self, study): collaborator_name = db.Column(ARRAY(String), nullable=False) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, ) study = db.relationship("Study", back_populates="study_sponsors_collaborators") def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, "responsible_party_type": self.responsible_party_type, 
"responsible_party_investigator_name": self.responsible_party_investigator_name, "responsible_party_investigator_title": self.responsible_party_investigator_title, diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index c2003bb7..24920e32 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -1,5 +1,3 @@ -import uuid - from model import Study from ..db import db @@ -9,7 +7,6 @@ class StudyStatus(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): - self.id = str(uuid.uuid4()) self.study = study self.overall_status = None self.why_stopped = "" @@ -20,7 +17,6 @@ def __init__(self, study): __tablename__ = "study_status" - id = db.Column(db.CHAR(36), primary_key=True) overall_status = db.Column(db.String, nullable=True) why_stopped = db.Column(db.String, nullable=False) start_date = db.Column(db.String, nullable=True) @@ -29,14 +25,16 @@ def __init__(self, study): completion_date_type = db.Column(db.String, nullable=True) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, ) study = db.relationship("Study", back_populates="study_status") def to_dict(self): """Converts the study to a dictionary""" return { - "id": self.id, "overall_status": self.overall_status, "why_stopped": self.why_stopped, "start_date": self.start_date, diff --git a/model/token_blacklist.py b/model/token_blacklist.py index 8bb830e0..0181cb4e 100644 --- a/model/token_blacklist.py +++ b/model/token_blacklist.py @@ -6,6 +6,9 @@ class TokenBlacklist(db.Model): # type: ignore jti = db.Column(db.CHAR(36), primary_key=True) exp = db.Column(db.String, nullable=False) + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + user = db.relationship("User", back_populates="token_blacklist") + def 
to_dict(self): return { "jti": self.jti, diff --git a/model/user.py b/model/user.py index c252b9a2..9e741efc 100644 --- a/model/user.py +++ b/model/user.py @@ -27,6 +27,8 @@ def __init__(self, password): study_contributors = db.relationship("StudyContributor", back_populates="user") email_verification = db.relationship("EmailVerification", back_populates="user") user_details = db.relationship("UserDetails", uselist=False, back_populates="user") + token_blacklist = db.relationship("TokenBlacklist", back_populates="user") + notification = db.relationship("Notification", back_populates="user") def to_dict(self): return { diff --git a/model/version.py b/model/version.py index 8829659b..9cff0264 100644 --- a/model/version.py +++ b/model/version.py @@ -20,6 +20,7 @@ class Version(db.Model): # type: ignore def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) + self.created_at = datetime.datetime.now(timezone.utc).timestamp() __tablename__ = "version" id = db.Column(db.CHAR(36), primary_key=True) @@ -32,9 +33,19 @@ def __init__(self, dataset): created_at = db.Column(db.BigInteger, nullable=False) published_on = db.Column(db.BigInteger, nullable=False) + version_readme = db.relationship( + "VersionReadme", + uselist=False, + back_populates="version", + cascade="all, delete", + ) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) dataset = db.relationship("Dataset", back_populates="dataset_versions") - participants = db.relationship("Participant", secondary=version_participants) + participants = db.relationship( + "Participant", + secondary=version_participants, + cascade="all, delete", + ) def to_dict(self): return { @@ -46,9 +57,10 @@ def to_dict(self): "created_at": self.created_at, "doi": self.doi, "published": self.published, - "participants": [p.id for p in self.participants] - if isinstance(self.participants, (list, set)) - else [], + "readme": self.version_readme.content if self.version_readme else "" + # 
"participants": [p.id for p in self.participants] + # if isinstance(self.participants, (list, set)) + # else [], } # [p.id for p in self.participants] @@ -61,9 +73,9 @@ def from_data(dataset: Dataset, data: dict): def update(self, data: dict): self.title = data["title"] - self.published = data["published"] - self.doi = data["doi"] + self.published = data["published"] if "published" in data else False + self.doi = data["doi"] if "doi" in data else "" self.published_on = datetime.datetime.now(timezone.utc).timestamp() self.updated_on = datetime.datetime.now(timezone.utc).timestamp() - self.participants[:] = data["participants"] - self.changelog = data["changelog"] + # self.participants[:] = data["participants"] + self.changelog = data["changelog"] if "changelog" in data else "" diff --git a/model/version_readme.py b/model/version_readme.py new file mode 100644 index 00000000..ec657f54 --- /dev/null +++ b/model/version_readme.py @@ -0,0 +1,25 @@ +from .db import db + + +class VersionReadme(db.Model): # type: ignore + __tablename__ = "version_readme" + content = db.Column(db.String, nullable=True) + + version_id = db.Column( + db.CHAR(36), db.ForeignKey("version.id"), primary_key=True, nullable=False + ) + version = db.relationship("Version", back_populates="version_readme") + + def to_dict(self): + return { + "content": self.content, + } + + @staticmethod + def from_data(data: dict): + user = VersionReadme() + user.update(data) + return user + + def update(self, data: dict): + self.content = data["content"] diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index 346c6ee3..df3000ca 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -308,7 +308,7 @@ def test_put_description_metadata(_test_client, _login_user): assert response.status_code == 200 response_data = json.loads(response.data) - pytest.global_description_id = response_data["id"] + # pytest.global_id = 
response_data["study_id"] assert response_data["brief_summary"] == "brief_summary" assert response_data["detailed_description"] == "detailed_description" From 2f938f752ab4decdb385e2710ddd40e100f63e49 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Wed, 25 Oct 2023 19:12:39 -0700 Subject: [PATCH 316/505] feat: redcap api, caching, upgrade python --- .gitignore | 1 + README.md | 19 +- apis/__init__.py | 17 + apis/redcap.py | 178 ++ apis/redcap_data/__init__.py | 5 + apis/redcap_data/redcap_project_data.py | 86 + .../redcap_report_participant_values_data.py | 149 ++ .../redcap_report_participants_data.py | 57 + .../redcap_report_repeat_surveys_data.py | 62 + .../redcap_report_survey_completions_data.py | 171 ++ apis/redcap_data_namespace.py | 3 + app.py | 37 +- cache-docker-compose.yaml | 20 + caching/__init__.py | 1 + caching/cache.py | 27 + config.py | 9 + dev-docker-compose.yaml | 21 + model/__init__.py | 2 + model/study.py | 8 + model/study_metadata/study_redcap.py | 71 + model/study_redcap.py | 70 + modules/__init__.py | 1 + modules/etl/__init__.py | 3 + modules/etl/config/__init__.py | 8 + modules/etl/config/aireadi_config.py | 1688 ++++++++++++++ modules/etl/transforms/__init__.py | 2 + modules/etl/transforms/module_transform.py | 297 +++ modules/etl/transforms/redcap_transform.py | 763 +++++++ modules/etl/vtypes/__init__.py | 11 + modules/etl/vtypes/categorical.py | 36 + modules/etl/vtypes/compound.py | 50 + modules/etl/vtypes/continuous.py | 31 + modules/etl/vtypes/discrete.py | 25 + modules/etl/vtypes/mixed.py | 56 + modules/etl/vtypes/timeseries.py | 48 + modules/etl/vtypes/vtype.py | 46 + poetry.lock | 1965 +++++++++-------- pyproject.toml | 7 +- 38 files changed, 5128 insertions(+), 923 deletions(-) create mode 100644 apis/redcap.py create mode 100644 apis/redcap_data/__init__.py create mode 100644 apis/redcap_data/redcap_project_data.py create mode 100644 apis/redcap_data/redcap_report_participant_values_data.py create mode 100644 
apis/redcap_data/redcap_report_participants_data.py create mode 100644 apis/redcap_data/redcap_report_repeat_surveys_data.py create mode 100644 apis/redcap_data/redcap_report_survey_completions_data.py create mode 100644 apis/redcap_data_namespace.py create mode 100644 cache-docker-compose.yaml create mode 100644 caching/__init__.py create mode 100644 caching/cache.py create mode 100644 model/study_metadata/study_redcap.py create mode 100644 model/study_redcap.py create mode 100644 modules/__init__.py create mode 100644 modules/etl/__init__.py create mode 100644 modules/etl/config/__init__.py create mode 100644 modules/etl/config/aireadi_config.py create mode 100644 modules/etl/transforms/__init__.py create mode 100644 modules/etl/transforms/module_transform.py create mode 100644 modules/etl/transforms/redcap_transform.py create mode 100644 modules/etl/vtypes/__init__.py create mode 100644 modules/etl/vtypes/categorical.py create mode 100644 modules/etl/vtypes/compound.py create mode 100644 modules/etl/vtypes/continuous.py create mode 100644 modules/etl/vtypes/discrete.py create mode 100644 modules/etl/vtypes/mixed.py create mode 100644 modules/etl/vtypes/timeseries.py create mode 100644 modules/etl/vtypes/vtype.py diff --git a/.gitignore b/.gitignore index a78d352e..aa257b35 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,7 @@ coverage # Environment variables .env +.venv # Database postgres_data/* diff --git a/README.md b/README.md index 98a45277..96ab1847 100644 --- a/README.md +++ b/README.md @@ -90,17 +90,32 @@ If you would like to update the api, please follow the instructions below. The api uses a postgres database. 
You can run a postgres database locally using docker: ```bash -docker-compose -f ./db-docker-compose.yml up +docker-compose -f ./db-docker-compose.yaml up ``` Close the database with: ```bash -docker-compose -f ./db-docker-compose.yml down -v +docker-compose -f ./db-docker-compose.yaml down -v ``` This database will not persist data between runs. +### Caching + +The api uses a redis cache. You can run a redis cache locally using docker, too: +```bash +docker-compose -f ./cache-docker-compose.yaml up +``` + +Shut down the cache with: + +```bash +docker-compose -f ./cache-docker-compose.yaml down -v +``` + +Like the database, the cache will not persist between runs. + ### API If you would like to run the api locally, you can use docker. diff --git a/apis/__init__.py b/apis/__init__.py index 26c8f39e..df143d8a 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -3,6 +3,7 @@ from apis.dataset_metadata_namespace import api as dataset_metadata_namespace from apis.study_metadata_namespace import api as study_metadata_namespace +from apis.redcap_data_namespace import api as redcap_data_namespace from .authentication import api as authentication from .contributor import api as contributors_api @@ -41,6 +42,12 @@ from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator from .study_metadata.study_status import api as status from .user import api as user +from .redcap import api as redcap +from .redcap_data.redcap_project_data import api as redcap_project_data +from .redcap_data.redcap_report_participants_data import api as redcap_report_participants_data +from .redcap_data.redcap_report_participant_values_data import api as redcap_report_participants_values_data +from .redcap_data.redcap_report_repeat_surveys_data import api as redcap_report_repeat_surveys_data +from .redcap_data.redcap_report_survey_completions_data import api as redcap_report_survey_completions_data api = Api( title="FAIRHUB", @@ -51,6 +58,7 @@ __all__ = [ 
"dataset_metadata_namespace", "study_metadata_namespace", + "redcap_data_namespace", "authentication", "contributors_api", "dataset_api", @@ -89,11 +97,18 @@ "identification", "study_description", "dataset_contributor", + "redcap", + "redcap_project_data", + "redcap_report_participants_data", + "redcap_report_participants_values_data", + "redcap_report_repeat_surveys_data", + "redcap_report_survey_completions_data", ] api.add_namespace(dataset_metadata_namespace) api.add_namespace(study_metadata_namespace) +api.add_namespace(redcap_data_namespace) api.add_namespace(authentication) @@ -112,3 +127,5 @@ def get(self): api.add_namespace(participants_api) api.add_namespace(contributors_api) api.add_namespace(user) +api.add_namespace(redcap) + diff --git a/apis/redcap.py b/apis/redcap.py new file mode 100644 index 00000000..e3a45b88 --- /dev/null +++ b/apis/redcap.py @@ -0,0 +1,178 @@ +"""API routes for study redcap""" +import typing + +from flask import request +from flask_restx import Namespace, Resource, fields +from jsonschema import ValidationError, validate + +import model + +from .authentication import is_granted + +api = Namespace("Redcap", description="Redcap operations", path="/") + +redcap_model = api.model( + "Redcap", + { + "id": fields.String(required=True), + "redcap_api_token": fields.String(required=True), + "redcap_api_url": fields.String(required=True), + "redcap_project_id": fields.String(required=True), + "redcap_report_id_survey_completions": fields.String(required=True), + "redcap_report_id_repeat_surveys": fields.String(required=True), + "redcap_report_id_participant_values": fields.String(required=True), + "redcap_report_id_participants": fields.String(required=True), + }, +) + +@api.route("/study//redcap") +class Redcap(Resource): + """Study Redcap Metadata""" + + @api.doc("redcap") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_model) + def get(self, study_id: int, redcap_project_id: str): + 
"""Get study redcap""" + study_ = model.Study.query.get(study_id) + study_redcap_ = study_.study_redcap + return study_redcap_.to_dict() + + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_model) + def post(self, study_id: int): + """Update study redcap""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "redcap_api_token", + "redcap_api_url", + "redcap_project_id", + "redcap_report_id_survey_completions", + "redcap_report_id_repeat_surveys", + "redcap_report_id_participant_values", + "redcap_report_id_participants", + ], + "properties": { + "redcap_api_token": {"type": string, "minLength": 1}, + "redcap_api_url": {"type": string, "minLength": 1}, + "redcap_project_id": {"type": string, "minLength": 1}, + "redcap_report_id_participants": {"type": string, "minLength": 1}, + "redcap_report_id_survey_completions": {"type": string}, + "redcap_report_id_repeat_surveys": {"type": string}, + "redcap_report_id_participant_values": {"type": string}, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[typing.Any, dict] = request.json + if len(data["redcap_api_url"]) < 1: + return ( + f"recap_api_url is required for redcap access: {data['redcap_api_url']}", + 400, + ) + if len(data["redcap_api_token"]) < 1: + return ( + f"recap_api_token is required for redcap access: {data['redcap_api_token']}", + 400, + ) + if len(data["redcap_project_id"]) < 1: + return ( + f"recap_project_id is required for redcap access: {data['redcap_project_id']}", + 400, + ) + + study_obj = model.Study.query.get(study_id) + if not is_granted("viewer", study_id): + return "Access denied, you can not modify", 403 + study = model.Study.query.get(study_id) + study.study_redcap.update(request.json) + model.db.session.commit() + + return study.study_redcap.to_dict() + +@api.route("/study//redcap/") +class 
RedcapUpdate(Resource): + @api.doc("redcap") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_model) + def delete(self, study_id: int, redcap_project_id: str): + """Delete study redcap metadata""" + study_obj = model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + study_redcap_ = model.StudyRedcap.query.get(study_id) + model.db.session.delete(study_redcap_) + model.db.session.commit() + + return 204 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_model) + def put(self, study_id: int): + """Update study redcap""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "redcap_api_token", + "redcap_api_url", + "redcap_project_id", + "redcap_report_id_survey_completions", + "redcap_report_id_repeat_surveys", + "redcap_report_id_participant_values", + "redcap_report_id_participants", + ], + "properties": { + "redcap_api_token": {"type": "string", "minLength": 1}, + "redcap_api_url": {"type": "string", "minLength": 1}, + "redcap_project_id": {"type": "string", "minLength": 1}, + "redcap_report_id_participants": {"type": "string", "minLength": 1}, + "redcap_report_id_survey_completions": {"type": "string"}, + "redcap_report_id_repeat_surveys": {"type": "string"}, + "redcap_report_id_participant_values": {"type": "string"}, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[typing.Any, dict] = request.json + if len(data["redcap_api_url"]) < 1: + return ( + f"redcap_api_url is required for redcap access: {data['redcap_api_url']}", + 400, + ) + if len(data["redcap_api_token"]) < 1: + return ( + f"redcap_api_token is required for redcap access: {data['redcap_api_token']}", + 400, + ) + if len(data["redcap_project_id"]) < 1: + return ( + f"redcap_project_id is required for redcap
access: {data['redcap_project_id']}", + 400, + ) + + study_obj = model.Study.query.get(study_id) + if not is_granted("viewer", study_id): + return "Access denied, you can not modify", 403 + study = model.Study.query.get(study_id) + study.study_redcap.update(request.json) + model.db.session.commit() + + return study.study_redcap.to_dict() + diff --git a/apis/redcap_data/__init__.py b/apis/redcap_data/__init__.py new file mode 100644 index 00000000..3f1960b0 --- /dev/null +++ b/apis/redcap_data/__init__.py @@ -0,0 +1,5 @@ +from .redcap_project_data import RedcapProjectDataResource +from .redcap_report_participants_data import RedcapReportParticipantsDataResource +from .redcap_report_participant_values_data import RedcapReportParticipantValuesDataResource +from .redcap_report_repeat_surveys_data import RedcapReportRepeatSurveysDataResource +from .redcap_report_survey_completions_data import RedcapReportSurveyCompletionsDataResource diff --git a/apis/redcap_data/redcap_project_data.py b/apis/redcap_data/redcap_project_data.py new file mode 100644 index 00000000..77ea86e5 --- /dev/null +++ b/apis/redcap_data/redcap_project_data.py @@ -0,0 +1,86 @@ +"""API routes for redcap project""" +import typing + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.redcap_data_namespace import api + +from ..authentication import is_granted + +# # REDCap Data Visualization ETL Configuration +# from modules.etl.config import redcapTransformConfig +# from modules.etl.config import sexGenderTransformConfig +# from modules.etl.config import raceEthnicityTransformConfig +# from modules.etl.config import phenotypeTransformConfig +# from modules.etl.config import studyWaypointsTransformConfig + +# # ETL Modules +# from modules.etl import transforms + +# Import In-Memory Cache + +redcap_project_data = api.model("RedcapProject", { + "project_id": fields.String(required=True, readonly=True, 
description=""), + "project_title": fields.String(required=True, readonly=True, description=""), + "creation_time": fields.String(required=True, readonly=True, description=""), + "production_time": fields.String(required=True, readonly=True, description=""), + "in_production": fields.Boolean(r =True, description=""), + "project_language": fields.String(required=True, readonly=True, description=""), + "purpose": fields.Integer(required=True, readonly=True, description=""), + "purpose_other": fields.Integer(required=True, readonly=True, description=""), + "project_notes": fields.String(required=True, readonly=True, description=""), + "custom_record_label": fields.String(required=True, readonly=True, description=""), + "secondary_unique_field": fields.String( + required=True, readonly=True, description="" + ), + "is_longitudinal": fields.Boolean(required=True, readonly=True, description=""), + "has_repeating_instruments_or_events": fields.Boolean( + required=True, readonly=True, description="" + ), + "surveys_enabled": fields.Boolean(required=True, readonly=True, description=""), + "scheduling_enabled": fields.Boolean(required=True, readonly=True, description=""), + "record_autonumbering_enabled": fields.Boolean( + required=True, readonly=True, description="" + ), + "randomization_enabled": fields.Boolean( + required=True, readonly=True, description="" + ), + "ddp_enabled": fields.Boolean(required=True, readonly=True, description=""), + "project_irb_number": fields.String(required=True, readonly=True, description=""), + "project_grant_number": fields.String(required=True, readonly=True, description=""), + "project_pi_firstname": fields.String(required=True, readonly=True, description=""), + "project_pi_lastname": fields.String(required=True, readonly=True, description=""), + "display_today_now_button": fields.Boolean( + required=True, readonly=True, description="" + ), + "missing_data_codes": fields.String(required=True, readonly=True, description=""), + 
"external_modules": fields.String(required=True, readonly=True, description=""), + "bypass_branching_erase_field_prompt": fields.Boolean( + required=True, readonly=True, description="" + ), +}) + +@api.route("/study//redcap//project") +class RedcapProjectDataResource(Resource): + @api.doc("project") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_data) + # @cache.cached() + def get(self, study_id: int, redcap_project_id: str): + """ + Get REDCap project + + TODO: Will need to use project_id to query SQL/KeyVault to + get the correct REDCap API URL and token. For now, + we'll just assume we have access through globals. + """ + study_ = model.Study.query.get(study_id) + study_redcap_ = study_.study_redcap.to_dict() + PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) + project = PyCapProject.export_project_info() + return project + diff --git a/apis/redcap_data/redcap_report_participant_values_data.py b/apis/redcap_data/redcap_report_participant_values_data.py new file mode 100644 index 00000000..aac486d3 --- /dev/null +++ b/apis/redcap_data/redcap_report_participant_values_data.py @@ -0,0 +1,149 @@ +"""API routes for redcap report participant values data""" +import typing + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.redcap_data_namespace import api + +# # REDCap Data Visualization ETL Configuration +# from modules.etl.config import redcapTransformConfig +# from modules.etl.config import sexGenderTransformConfig +# from modules.etl.config import raceEthnicityTransformConfig +# from modules.etl.config import phenotypeTransformConfig +# from modules.etl.config import studyWaypointsTransformConfig + +# # ETL Modules +# from modules.etl import transforms + +from ..authentication import is_granted + +# Import In-Memory Cache +# from __main__ import IN_MEMORY_CACHE + 
+redcap_report_participant_values_data = api.model("RedcapReportParticipantValuesData", { + "record_id": fields.String( + required=True, readonly=True, description="Participant record ID" + ), + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "siteid": fields.String(required=True, readonly=True, description="Site ID"), + "dm": fields.String( + required=True, readonly=True, description="Data approved for Fairhub.io" + ), + "siteid": fields.String(required=True, readonly=True, description="Site ID"), + "genderid": fields.String( + required=True, readonly=True, description="Gender identity" + ), + "scrsex": fields.String(required=True, readonly=True, description="Sex at birth"), + "race": fields.String(required=True, readonly=True, description="Race"), + "race2": fields.String( + required=True, readonly=True, description="Race further defined" + ), + "ethnic": fields.String(required=True, readonly=True, description="Ethnicity"), + "dvenvyn": fields.String( + required=True, readonly=True, description="Environmental sensor distributed" + ), + "dvenvstdat": fields.String( + required=True, + readonly=True, + description="Date of environmental sensor distribution", + ), + "dvenvcrcid": fields.String( + required=True, + readonly=True, + description="Was environmental sensor demonstrated?", + ), + "dvcgmyn": fields.String( + required=True, readonly=True, description="Continuous glucose monitor inserted" + ), + "dvcgmstdat": fields.String( + required=True, + readonly=True, + description="Date of continuous glucose monitor was inserted", + ), + "dvcgmvrfy": fields.String( + required=True, + readonly=True, + description="Continuous glucose monitor initialized and recording?", + ), + "dvamwyn": fields.String( + required=True, + readonly=True, + description="Was the Apple watch sent home with the participant?", + ), + "dvamwstdat": fields.String( + required=True, + readonly=True, + description="Date Apple watch was given to 
participant", + ), + "dvamwsn": fields.String( + required=True, readonly=True, description="Apple watch serial number" + ), + "dvrtmthd": fields.String( + required=True, readonly=True, description="Planned method of device return" + ), + "dvrtnyn": fields.String( + required=True, + readonly=True, + description="Was the participant given device return instructions and shipping materials?", + ), + "dvrtnship": fields.String( + required=True, readonly=True, description="Return shipping tracking number" + ), + "mhterm_dm1": fields.String( + required=True, readonly=True, description="Type I diabetes" + ), + "mhterm_dm2": fields.String( + required=True, readonly=True, description="Type II diabetes" + ), + "mhterm_predm": fields.String( + required=True, readonly=True, description="Pre-diabetes" + ), + "mh_dm_age": fields.String( + required=True, readonly=True, description="Age diagnosed with type II diabetes" + ), + "mh_a1c": fields.String( + required=True, readonly=True, description="Elevated A1C levels" + ), + "cmtrt_a1c": fields.String( + required=True, + readonly=True, + description="Taking pills to control A1C and blood glucose levels?", + ), + "cmtrt_insln": fields.String( + required=True, + readonly=True, + description="Injecting insulin to control blood glucose levels", + ), + "cmtrt_glcs": fields.String( + required=True, + readonly=True, + description="Using other injectables to control blood glucose levels", + ), + "cmtrt_lfst": fields.String( + required=True, + readonly=True, + description="Using lifestyle changes to control blood glucose levels", + ), + "scrcmpdat": fields.String( + required=True, readonly=True, description="Screening survey completion date" + ), +}) + +@api.route("/study//redcap//participant-values") +class RedcapReportParticipantValuesDataResource(Resource): + @api.doc("report_participant_values_data") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_report_participant_values_data) + # 
@IN_MEMORY_CACHE.cached() + def get(self, study_id: int, redcap_project_id: str): + study_ = model.Study.query.get(study_id) + study_redcap_ = study_.study_redcap.to_dict() + PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) + participant_values = PyCapProject.export_report(study_redcap_["redcap_report_id_participant_values"]) + return participant_values diff --git a/apis/redcap_data/redcap_report_participants_data.py b/apis/redcap_data/redcap_report_participants_data.py new file mode 100644 index 00000000..7dfa2aa7 --- /dev/null +++ b/apis/redcap_data/redcap_report_participants_data.py @@ -0,0 +1,57 @@ +"""API routes for redcap report participants data data""" +import typing + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.redcap_data_namespace import api + +# # REDCap Data Visualization ETL Configuration +# from modules.etl.config import redcapTransformConfig +# from modules.etl.config import sexGenderTransformConfig +# from modules.etl.config import raceEthnicityTransformConfig +# from modules.etl.config import phenotypeTransformConfig +# from modules.etl.config import studyWaypointsTransformConfig + +# # ETL Modules +# from modules.etl import transforms + +from ..authentication import is_granted + +# Import In-Memory Cache + + +redcap_report_participants_data = api.model("RedcapReportParticipantsData", { + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "dm_inperson_data_validated": fields.Integer( + required=True, + readonly=True, + attribute="dm___i", + description="All data collected and validated through in-person visit", + ), + "dm_device_data_validated": fields.Integer( + required=True, + readonly=True, + attribute="dm___d", + description="All device data entered and validated", + ), +}) + +@api.route("/study//redcap//participants") +class 
RedcapReportParticipantsDataResource(Resource): + @api.doc("report_participants_data") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_report_participants_data) + # @IN_MEMORY_CACHE.cached() + def get(self, study_id: int, redcap_project_id: str): + study_ = model.Study.query.get(study_id) + study_redcap_ = study_.study_redcap.to_dict() + PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) + participants = PyCapProject.export_report(study_redcap_["redcap_report_id_participants"]) + return participants + diff --git a/apis/redcap_data/redcap_report_repeat_surveys_data.py b/apis/redcap_data/redcap_report_repeat_surveys_data.py new file mode 100644 index 00000000..ff706bc0 --- /dev/null +++ b/apis/redcap_data/redcap_report_repeat_surveys_data.py @@ -0,0 +1,62 @@ +"""API routes for redcap report repeat surveys data""" +import typing + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.redcap_data_namespace import api + +# # REDCap Data Visualization ETL Configuration +# from modules.etl.config import redcapTransformConfig +# from modules.etl.config import sexGenderTransformConfig +# from modules.etl.config import raceEthnicityTransformConfig +# from modules.etl.config import phenotypeTransformConfig +# from modules.etl.config import studyWaypointsTransformConfig + +# # ETL Modules +# from modules.etl import transforms + +from ..authentication import is_granted + +# Import In-Memory Cache +# from __main__ import IN_MEMORY_CACHE + +redcap_report_repeat_surveys_data = api.model("RedcapReportRepeatSurveysData", { + "record_id": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "current_medications_complete": fields.String( + required=True, + readonly=True, 
+ description="All data collected and validated through in-person visit", + ), + "redcap_repeat_instrument": fields.String( + required=True, + readonly=True, + description="All device data entered and validated", + ), + "redcap_repeat_instance": fields.String( + required=True, + readonly=True, + description="All device data entered and validated", + ), +}) + +@api.route("/study//redcap//repeat-surveys") +class RedcapReportRepeatSurveysDataResource(Resource): + @api.doc("report_repeat_surveys_data") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_report_repeat_surveys_data) + # @IN_MEMORY_CACHE.cached() + def get(self, study_id: int, redcap_project_id: str): + study_ = model.Study.query.get(study_id) + study_redcap_ = study_.study_redcap.to_dict() + PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) + repeat_surveys = PyCapProject.export_report(study_redcap_["redcap_report_id_repeat_surveys"]) + return repeat_surveys diff --git a/apis/redcap_data/redcap_report_survey_completions_data.py b/apis/redcap_data/redcap_report_survey_completions_data.py new file mode 100644 index 00000000..df8a98ba --- /dev/null +++ b/apis/redcap_data/redcap_report_survey_completions_data.py @@ -0,0 +1,171 @@ +"""API routes for redcap report survey completions data""" +import typing + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.redcap_data_namespace import api + +# # REDCap Data Visualization ETL Configuration +# from modules.etl.config import redcapTransformConfig +# from modules.etl.config import sexGenderTransformConfig +# from modules.etl.config import raceEthnicityTransformConfig +# from modules.etl.config import phenotypeTransformConfig +# from modules.etl.config import studyWaypointsTransformConfig + +# # ETL Modules +# from modules.etl import transforms + +from ..authentication import 
is_granted + +# Import In-Memory Cache +# from __main__ import IN_MEMORY_CACHE + +redcap_report_survey_completions_data = api.model("RedcapReportSurveyCompletionsData", { + "record_id": fields.String( + required=True, readonly=True, description="Participant record ID" + ), + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "screening_survey_complete": fields.String( + required=True, readonly=True, description="Screening survey completed" + ), + "study_enrollment_complete": fields.String( + required=True, readonly=True, description="Study enrollment completed" + ), + "recruitment_survey_complete": fields.String( + required=True, readonly=True, description="Recruitment survey completed" + ), + "faq_survey_complete": fields.String( + required=True, readonly=True, description="FAQ survey completed" + ), + "recruitment_survey_management_complete": fields.String( + required=True, + readonly=True, + description="Recruitment survey management completed", + ), + "device_distribution_complete": fields.String( + required=True, readonly=True, description="Device distribution completed" + ), + "preconsent_survey_complete": fields.String( + required=True, readonly=True, description="Pre-consent survey completed" + ), + "consent_survey_complete": fields.String( + required=True, readonly=True, description="Consent survey completed" + ), + "staff_consent_attestation_survey_complete": fields.String( + required=True, + readonly=True, + description="Staff consent attestation survey completed", + ), + "demographics_survey_complete": fields.String( + required=True, readonly=True, description="Demographics survey completed" + ), + "health_survey_complete": fields.String( + required=True, readonly=True, description="Health survey completed" + ), + "substance_use_survey_complete": fields.String( + required=True, readonly=True, description="Substance use survey completed" + ), + "cesd10_survey_complete": fields.String( + required=True, 
readonly=True, description="CES-D-10 survey completed" + ), + "paid5_dm_survey_complete": fields.String( + required=True, readonly=True, description="PAID-5 DM survey completed" + ), + "diabetes_survey_complete": fields.String( + required=True, readonly=True, description="Diabetes survey completed" + ), + "dietary_survey_complete": fields.String( + required=True, readonly=True, description="Dietary survey completed" + ), + "ophthalmic_survey_complete": fields.String( + required=True, readonly=True, description="Opthalmic survey completed" + ), + "px_sdoh_combined_survey_complete": fields.String( + required=True, readonly=True, description="PhenX SDOH survey completed" + ), + "px_food_insecurity_survey_complete": fields.String( + required=True, + readonly=True, + description="PhenX Food Insecurity survey completed", + ), + "px_neighborhood_environment_survey_complete": fields.String( + required=True, + readonly=True, + description="PhenX Neighborhood Enviroment survey completed", + ), + "px_racial_ethnic_discrimination_survey_complete": fields.String( + required=True, + readonly=True, + description="PhenX Racial/Ethnic Discrimination survey completed", + ), + "decline_participation_survey_complete": fields.String( + required=True, + readonly=True, + description="Decline participation survey completed", + ), + "meds_assessment_complete": fields.String( + required=True, readonly=True, description="Medications assessment completed" + ), + "driving_record_complete": fields.String( + required=True, readonly=True, description="Driving record completed" + ), + "physical_assessment_complete": fields.String( + required=True, readonly=True, description="Physical assessment completed" + ), + "bcva_complete": fields.String( + required=True, readonly=True, description="BCVA completed" + ), + "photopic_mars_complete": fields.String( + required=True, readonly=True, description="Photopic mars completed" + ), + "mesopic_mars_complete": fields.String( + required=True, readonly=True, 
description="Mesopic mars completed" + ), + "monofilament_complete": fields.String( + required=True, readonly=True, description="Monofilament completed" + ), + "moca_complete": fields.String( + required=True, readonly=True, description="MOCA instrument completed" + ), + "ecg_complete": fields.String( + required=True, readonly=True, description="ECG completed" + ), + "retinal_imaging_v2_complete": fields.String( + required=True, readonly=True, description="Retinal imaging completed" + ), + "lab_results_complete": fields.String( + required=True, readonly=True, description="Lab results completed" + ), + "device_return_complete": fields.String( + required=True, readonly=True, description="Device return completed" + ), + "specimen_management_complete": fields.String( + required=True, readonly=True, description="Specimen management completed" + ), + "disposition_complete": fields.String( + required=True, readonly=True, description="Participant disposition completed" + ), + "data_management_complete": fields.String( + required=True, readonly=True, description="Fairhub.io data management completed" + ), +}) + + +@api.route("/study//redcap//survey-completions") +class RedcapReportSurveyCompletionsDataResource(Resource): + @api.doc("report_survey_completions_data") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_report_survey_completions_data) + # @IN_MEMORY_CACHE.cached() + def get(self, study_id: int, redcap_project_id: str): + study_ = model.Study.query.get(study_id) + study_redcap_ = study_.study_redcap.to_dict() + PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) + survey_completions = PyCapProject.export_report(study_redcap_["redcap_report_id_survey_completions"]) + return survey_completions diff --git a/apis/redcap_data_namespace.py b/apis/redcap_data_namespace.py new file mode 100644 index 00000000..35c88091 --- /dev/null +++ b/apis/redcap_data_namespace.py @@ -0,0 +1,3 @@ 
+from flask_restx import Namespace + +api = Namespace("Redcap Data", description="Redcap data caching operations", path="/") diff --git a/app.py b/app.py index fe69d442..3d038ed5 100644 --- a/app.py +++ b/app.py @@ -9,10 +9,13 @@ from flask import Flask, request from flask_bcrypt import Bcrypt from flask_cors import CORS +from flask_caching import Cache from sqlalchemy import MetaData import config import model +import modules +from caching import create_cache from apis import api from apis.authentication import UnauthenticatedException, authentication, authorization from apis.exception import ValidationException @@ -21,7 +24,6 @@ bcrypt = Bcrypt() - def create_app(config_module=None): """Initialize the core application.""" # create and configure the app @@ -60,7 +62,38 @@ def create_app(config_module=None): # throw error raise RuntimeError("FAIRHUB_DATABASE_URL not set") + # Update this for + + cache = create_cache(app) + + # for key in app.config: + # if "CACHE" in key: + # print(f"{key}: {app.config[key]}") + # if "CACHE_URL" in app.config: + + # app.config["CACHE_URL"] = app.config["CACHE_URL"] + # app.config["CACHE_HOST"]= app.config["CACHE_HOST"] if "CACHE_HOST" in app.config["CACHE_HOST"] else "localhost" + # app.config["CACHE_PORT"]= app.config["CACHE_PORT"] if "CACHE_PORT" in app.config["CACHE_PORT"] else 6379 + # app.config["CACHE_DB"]= app.config["CACHE_DB"] if "CACHE_DB" in app.config["CACHE_DB"] else 0 + # app.config["CACHE_DEFAULT_TIMEOUT"]= app.config["CACHE_DEFAULT_TIMEOUT"] if "CACHE_DEFAULT_TIMEOUT" in app.config else 86400 + # app.config["CACHE_KEY_PREFIX"]= app.config["CACHE_KEY_PREFIX"] if "CACHE_KEY_PREFIX" in app.config else "fairhub-io#" + + # cache = Cache( + # config={ + # "CACHE_TYPE": "RedisCache", + # "CACHE_DEBUG": False, + # "CACHE_DEFAULT_TIMEOUT": app.config["CACHE_DEFAULT_TIMEOUT"], + # "CACHE_KEY_PREFIX": app.config["CACHE_KEY_PREFIX"], + # "CACHE_REDIS_HOST": app.config["CACHE_HOST"], + # "CACHE_REDIS_PORT": 
app.config["CACHE_PORT"], + # "CACHE_REDIS_DB": app.config["CACHE_DB"], + # "CACHE_REDIS_URL": app.config["CACHE_URL"], + # } + # ) + + # Moved down here to allow for loading of redis cache prior to API model.db.init_app(app) + cache.init_app(app) api.init_app(app) bcrypt.init_app(app) @@ -231,7 +264,7 @@ def destroy_schema(): parser = ArgumentParser() parser.add_argument( - "-p", "--port", default=5000, type=int, help="port to listen on" + "-p", "--port", default=3001, type=int, help="port to listen on" ) args = parser.parse_args() port = args.port diff --git a/cache-docker-compose.yaml b/cache-docker-compose.yaml new file mode 100644 index 00000000..ff06355a --- /dev/null +++ b/cache-docker-compose.yaml @@ -0,0 +1,20 @@ +version: '7.2' +services: + cache: + image: redis:7.2-alpine + restart: always + environment: + CACHE_DB: fairhub + CACHE_HOST: localhost + CACHE_PORT: 6379 + CACHE_URL: redis://127.0.0.1:6379 + CACHE_KEY_PREFIX: fairhub-io# + CACHE_TIMEOUT: 86400 + ports: + - '6379:6379' + command: redis-server --save 20 1 --loglevel warning + volumes: + - cache:/data +volumes: + cache: + driver: local diff --git a/caching/__init__.py b/caching/__init__.py new file mode 100644 index 00000000..11e47a57 --- /dev/null +++ b/caching/__init__.py @@ -0,0 +1 @@ +from .cache import create_cache diff --git a/caching/cache.py b/caching/cache.py new file mode 100644 index 00000000..1714d17d --- /dev/null +++ b/caching/cache.py @@ -0,0 +1,27 @@ +from flask_caching import Cache + +def create_cache (app): + cache = None + try: + app.config["CACHE_URL"] = app.config["CACHE_URL"] if "CACHE_URL" in app.config else "redis://127.0.0.1:6379" + app.config["CACHE_HOST"]= app.config["CACHE_HOST"] if "CACHE_HOST" in app.config else "localhost" + app.config["CACHE_PORT"]= app.config["CACHE_PORT"] if "CACHE_PORT" in app.config else 6379 + app.config["CACHE_DB"]= app.config["CACHE_DB"] if "CACHE_DB" in app.config else 0 + app.config["CACHE_DEFAULT_TIMEOUT"]= 
app.config["CACHE_DEFAULT_TIMEOUT"] if "CACHE_DEFAULT_TIMEOUT" in app.config else 86400 + app.config["CACHE_KEY_PREFIX"]= app.config["CACHE_KEY_PREFIX"] if "CACHE_KEY_PREFIX" in app.config else "fairhub-io#" + + cache = Cache( + config={ + "CACHE_TYPE": "RedisCache", + "CACHE_DEBUG": False, + "CACHE_DEFAULT_TIMEOUT": app.config["CACHE_DEFAULT_TIMEOUT"], + "CACHE_KEY_PREFIX": app.config["CACHE_KEY_PREFIX"], + "CACHE_REDIS_HOST": app.config["CACHE_HOST"], + "CACHE_REDIS_PORT": app.config["CACHE_PORT"], + "CACHE_REDIS_DB": app.config["CACHE_DB"], + "CACHE_REDIS_URL": app.config["CACHE_URL"], + } + ) + except: + raise RuntimeError("Unable to instantiate cache!") + return cache diff --git a/config.py b/config.py index f1d0304b..634f1df1 100644 --- a/config.py +++ b/config.py @@ -1,5 +1,14 @@ """Configuration for the application.""" from os import environ +from dotenv import load_dotenv +load_dotenv(".env") + FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") +CACHE_DEFAULT_TIMEOUT = environ.get("CACHE_DEFAULT_TIMEOUT") +CACHE_KEY_PREFIX = environ.get("CACHE_KEY_PREFIX") +CACHE_HOST = environ.get("CACHE_HOST") +CACHE_PORT = environ.get("CACHE_PORT") +CACHE_DB = environ.get("CACHE_DB") +CACHE_URL = environ.get("CACHE_URL") diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index b1ecd444..6671b6cb 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -38,3 +38,24 @@ services: # volumes: # - ./postgres-data:/var/lib/postgresql/data # - ./sql/init_timezones.sql:/docker-entrypoint-initdb.d/1-schema.sql + + cache: + image: redis:7.2-alpine + # restart: always + environment: + CACHE_DB: fairhub + CACHE_HOST: localhost + CACHE_PORT: 6379 + CACHE_URL: redis://127.0.0.1:6379 + CACHE_PREFIX: fairhub-io# + CACHE_TIMEOUT: 86400 + CACHE_PASSWORD: development + ports: + - '6379:6379' + command: redis-server --save 20 1 --loglevel warning + volumes: + - cache:/data + # volumes: + # cache: + # driver: 
local + diff --git a/model/__init__.py b/model/__init__.py index 5b91a29a..7d3845c5 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -53,6 +53,7 @@ from .user_details import UserDetails from .version import Version from .version_readme import VersionReadme +from .study_redcap import StudyRedcap __all__ = [ "Study", @@ -106,4 +107,5 @@ "UserDetails", "Notification", "VersionReadme", + "StudyRedcap", ] diff --git a/model/study.py b/model/study.py index a919bbae..7ffea516 100644 --- a/model/study.py +++ b/model/study.py @@ -27,6 +27,8 @@ def __init__(self): self.study_ipdsharing = model.StudyIpdsharing(self) self.study_description = model.StudyDescription(self) self.study_identification.append(model.StudyIdentification(self, False)) + # NOTE: this has not been tested yet + self.study_redcap = model.StudyRedcap(self) self.study_other = model.StudyOther(self) # self.study_contributors = model.StudyContributor(self) @@ -99,6 +101,12 @@ def __init__(self): back_populates="study", cascade="all, delete", ) + #NOTE: This has not been tested yet + study_redcap = db.relationship( + "StudyRedcap", + back_populates = "study", + cascade="all, delete" + ) study_intervention = db.relationship( "StudyIntervention", back_populates="study", diff --git a/model/study_metadata/study_redcap.py b/model/study_metadata/study_redcap.py new file mode 100644 index 00000000..6a634345 --- /dev/null +++ b/model/study_metadata/study_redcap.py @@ -0,0 +1,71 @@ +from model import Study + +from ..db import db + + +class StudyRedcap(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.study = study + self.redcap_api_token = None + self.redcap_api_url = None + self.redcap_project_id = None + self.redcap_report_id_survey_completions = None + self.redcap_report_id_repeat_surveys = None + self.redcap_report_id_participant_values = None + self.redcap_report_id_participants = None + + __tablename__ = "study_redcap" + + 
redcap_api_token = db.Column(db.String, nullable=True) + redcap_api_url = db.Column(db.String, nullable=True) + redcap_project_id = db.Column(db.String, nullable=True) + redcap_report_id_survey_completions = db.Column(db.String, nullable=True) + redcap_report_id_repeat_surveys = db.Column(db.String, nullable=True) + redcap_report_id_participant_values = db.Column(db.String, nullable=True) + redcap_report_id_participants = db.Column(db.String, nullable=True) + + study_id = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, + ) + study = db.relationship("Study", back_populates="study_redcap") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "redcap_api_token": self.redcap_api_token, + "redcap_api_url": self.redcap_api_url, + "redcap_project_id": self.redcap_project_id, + "redcap_report_id_survey_completions": self.redcap_report_id_survey_completions, + "redcap_report_id_repeat_surveys": self.redcap_report_id_repeat_surveys, + "redcap_report_id_participant_values": self.redcap_report_id_participant_values, + "redcap_report_id_participants": self.redcap_report_id_participants + } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_redcap = StudyRedcap(study) + study_redcap.update(data) + + return study_redcap + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.redcap_api_token = data["redcap_api_token"] + self.redcap_api_url = data["redcap_api_url"] + self.redcap_project_id = data["redcap_project_id"] + self.redcap_report_id_survey_completions = data["redcap_report_id_survey_completions"] + self.redcap_report_id_repeat_surveys = data["redcap_report_id_repeat_surveys"] + self.redcap_report_id_participant_values = data["redcap_report_id_participant_values"] + self.redcap_report_id_participants = data["redcap_report_id_participants"] + self.study.touch() + + def validate(self): + 
"""Validates the study""" + violations: list = [] + return violations diff --git a/model/study_redcap.py b/model/study_redcap.py new file mode 100644 index 00000000..9488bf56 --- /dev/null +++ b/model/study_redcap.py @@ -0,0 +1,70 @@ +from model import Study + +from .db import db + +class StudyRedcap(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.study = study + self.redcap_api_token = None + self.redcap_api_url = None + self.redcap_project_id = None + self.redcap_report_id_survey_completions = None + self.redcap_report_id_repeat_surveys = None + self.redcap_report_id_participant_values = None + self.redcap_report_id_participants = None + + __tablename__ = "study_redcap" + + redcap_api_token = db.Column(db.String, nullable=True) + redcap_api_url = db.Column(db.String, nullable=True) + redcap_project_id = db.Column(db.String, nullable=True) + redcap_report_id_survey_completions = db.Column(db.String, nullable=True) + redcap_report_id_repeat_surveys = db.Column(db.String, nullable=True) + redcap_report_id_participant_values = db.Column(db.String, nullable=True) + redcap_report_id_participants = db.Column(db.String, nullable=True) + + study_id = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, + ) + study = db.relationship("Study", back_populates="study_redcap") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "redcap_api_token": self.redcap_api_token, + "redcap_api_url": self.redcap_api_url, + "redcap_project_id": self.redcap_project_id, + "redcap_report_id_survey_completions": self.redcap_report_id_survey_completions, + "redcap_report_id_repeat_surveys": self.redcap_report_id_repeat_surveys, + "redcap_report_id_participant_values": self.redcap_report_id_participant_values, + "redcap_report_id_participants": self.redcap_report_id_participants + } + + @staticmethod + def from_data(study: Study, 
data: dict): + """Creates a new study from a dictionary""" + study_redcap = StudyRedcap(study) + study_redcap.update(data) + + return study_redcap + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.redcap_api_token = data["redcap_api_token"] + self.redcap_api_url = data["redcap_api_url"] + self.redcap_project_id = data["redcap_project_id"] + self.redcap_report_id_survey_completions = data["redcap_report_id_survey_completions"] + self.redcap_report_id_repeat_surveys = data["redcap_report_id_repeat_surveys"] + self.redcap_report_id_participant_values = data["redcap_report_id_participant_values"] + self.redcap_report_id_participants = data["redcap_report_id_participants"] + self.study.touch() + + def validate(self): + """Validates the study""" + violations: list = [] + return violations diff --git a/modules/__init__.py b/modules/__init__.py new file mode 100644 index 00000000..1bf7a639 --- /dev/null +++ b/modules/__init__.py @@ -0,0 +1 @@ +from .etl import config, transforms, vtypes diff --git a/modules/etl/__init__.py b/modules/etl/__init__.py new file mode 100644 index 00000000..d344ad14 --- /dev/null +++ b/modules/etl/__init__.py @@ -0,0 +1,3 @@ +from .config import * +from .transforms import * +from .vtypes import * diff --git a/modules/etl/config/__init__.py b/modules/etl/config/__init__.py new file mode 100644 index 00000000..013db7ee --- /dev/null +++ b/modules/etl/config/__init__.py @@ -0,0 +1,8 @@ +from .aireadi_config import ( + redcapTransformConfig, + sexGenderTransformConfig, + raceEthnicityTransformConfig, + phenotypeTransformConfig, + studyWaypointsTransformConfig, + mixedTransformTestConfig, +) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py new file mode 100644 index 00000000..cd6ebcb0 --- /dev/null +++ b/modules/etl/config/aireadi_config.py @@ -0,0 +1,1688 @@ +import os +import dotenv +import numpy as np + +# Load API metadata from .env +dotenv.load_dotenv() + +# Set REDCap 
API References +REDCAP_API_TOKEN = os.environ["REDCAP_API_TOKEN"] +REDCAP_API_URL = os.environ["REDCAP_API_URL"] + +# Value assigned to missing values unless other specific value defined on function call +# (e.g. REDCapTransform.map_missing_values_by_columns(df, columns, new_missing_value)) +missing_value_generic = "Value Unavailable" + +# Utility Column Groups +index_columns = [ + "record_id", +] + +# Data Column Groups +data_columns = [ + "studyid", + "siteid", + "dm", + "genderid", + "scrsex", + "race", + "race2", + "ethnic", + "dvenvyn", + "dvenvstdat", + "dvenvcrcid", + "dvcgmyn", + "dvcgmstdat", + "dvcgmvrfy", + "dvamwyn", + "dvamwstdat", + "dvamwsn", + "dvrtmthd", + "dvrtnyn", + "dvrtnship", + "mhterm_dm1", + "mhterm_dm2", + "mhterm_predm", + "mh_dm_age", + "mh_a1c", + "cmtrt_a1c", + "cmtrt_insln", + "cmtrt_glcs", + "cmtrt_lfst", + "scrcmpdat", +] + +# Survey Column Groups +survey_columns = [ + "screening_survey_complete", + "study_enrollment_complete", + "recruitment_survey_complete", + "faq_survey_complete", + "recruitment_survey_management_complete", + "device_distribution_complete", + "preconsent_survey_complete", + "consent_survey_complete", + "staff_consent_attestation_survey_complete", + "demographics_survey_complete", + "health_survey_complete", + "substance_use_survey_complete", + "cesd10_survey_complete", + "paid5_dm_survey_complete", + "diabetes_survey_complete", + "dietary_survey_complete", + "ophthalmic_survey_complete", + "px_sdoh_combined_survey_complete", + "px_food_insecurity_survey_complete", + "px_neighborhood_environment_survey_complete", + "px_racial_ethnic_discrimination_survey_complete", + "decline_participation_survey_complete", + "meds_assessment_complete", + "driving_record_complete", + "physical_assessment_complete", + "bcva_complete", + "photopic_mars_complete", + "mesopic_mars_complete", + "monofilament_complete", + "moca_complete", + "ecg_complete", + "retinal_imaging_v2_complete", + "lab_results_complete", + 
"device_return_complete", + "specimen_management_complete", + "disposition_complete", + "data_management_complete", +] + +# Repeat Survey Column Groups +repeat_survey_columns = [ + "current_medications_complete", +] + +repeat_survey_data_columns = ["current_medications_complete", "current_medications"] + +# +# Value Maps +# + +survey_instrument_map = { + "2": "Complete", + "1": "Unverified", + "0": "Incomplete", +} + +# +# REDCap Transform Config +# + +redcapTransformConfig = { + "reports": [ + ( + "dashboard_data_generic", + {"report_id": 242544}, + [ + ("remap_values_by_columns", {"columns": data_columns}), + ("map_missing_values_by_columns", {"columns": data_columns}), + ("keep_columns", {"columns": index_columns + data_columns}), + ], + ), + ( + "dashboard_data_study_waypoints", + {"report_id": 251954}, + [ + ( + "remap_values_by_columns", + {"columns": survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": survey_columns}), + ("keep_columns", {"columns": index_columns + survey_columns}), + ], + ), + ( + "dashboard_data_repeat_instruments", + {"report_id": 259920}, + [ + ("drop_rows", {"columns": repeat_survey_columns}), + ( + "aggregate_repeat_instrument_column_by_index", + {"aggregator": np.max, "dtype": str}, + ), + ( + "keep_columns", + {"columns": index_columns + repeat_survey_data_columns}, + ), + ], + ), + ], + "merge_transformed_reports": ( + "dashboard_data_generic", + [ + ("dashboard_data_study_waypoints", {"on": index_columns, "how": "inner"}), + ( + "dashboard_data_repeat_instruments", + {"on": index_columns, "how": "outer"}, + ), + ], + ), + "post_merge_transforms": [ + ( + "remap_values_by_columns", + {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), + ], + "index_columns": ["record_id"], + "missing_value_generic": missing_value_generic, +} + +# +# Visualization Transforms +# + +# Sex & Gender 
Counts by Site +sexGenderTransformConfig = ( + "simpleTransform", + { + "key": "sex-and-gender", + "strict": True, + "transforms": { + "name": "Sex & Gender", + "vtype": "DoubleCategorical", + "method": { + "groups": ["siteid", "scrsex", "genderid"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Gender", + "field": "genderid", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Gender", + "field": "genderid", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + }, +) + +# Race & Ethnicity Counts by Site +raceEthnicityTransformConfig = ( + "simpleTransform", + { + "key": "race-and-ethnicity", + "strict": True, + "transforms": { + "name": "Race & Ethnicity", + "vtype": "DoubleCategorical", + "method": { + "groups": ["siteid", "race", "ethnic"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Ethnicity", + "field": "ethnic", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Ethnicity", + "field": "ethnic", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + }, +) + +# Phenotypes +phenotypeTransformConfig = ( + "simpleTransform", + { + "key": "phenotype", + "strict": True, + 
"transforms": { + "name": "Type II Diabetes", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "mhterm_dm2"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "mhterm_dm2", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Phenotype", + "field": "mhterm_dm2", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + }, +) + +# Study Waypoints +studyWaypointsTransformConfig = ( + "compoundTransform", + { + "key": "study-waypoints", + "strict": True, + "transforms": [ + { + "name": "Recruitment Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "recruitment_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "FAQ Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "faq_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "FAQ Survey", + "field": "faq_survey_complete", + 
"missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Screening Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "screening_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Preconsent Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Consent Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + "func": "count", 
+ }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Staff Consent Attestation Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "staff_consent_attestation_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Demographics Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": 
missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": 
{ + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", 
"dietary_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX SDOH Combined Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": 
missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "px_neighborhood_environment_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "SingleCategorical", + "method": { + "groups": [ + "siteid", + 
"px_racial_ethnic_discrimination_survey_complete", + ], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Decline Participation Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "decline_participation_survey_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Study Enrollment Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "study_enrollment_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Study Enrollment Survey", + "field": 
"study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Driving Record", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "driving_record_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Distribution", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Medications Assessment", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", 
"meds_assessment_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Physical Assessment", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "physical_assessment_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "BCVA", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": 
"record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Photopic MARS", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Mesopic MARS", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "mesopic_mars_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Monofilament", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "monofilament_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + 
"name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "MOCA", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "moca_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "MOCA", + "field": "moca_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "MOCA", + "field": "moca_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "ECG Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "ecg_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Lab Results Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "lab_results_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Lab Results Survey", + "field": 
"lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Specimen Management", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "specimen_management_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Return", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "device_return_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Disposition Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "disposition_complete"], + "value": 
"record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Disposition Survey", + "field": "disposition_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Disposition Survey", + "field": "disposition_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Data Management Survey", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "data_management_complete"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Sex & Gender Counts by Site +mixedTransformTestConfig = ( + "mixedTransform", + { + "key": "mixed-transform-test", + "strict": True, + "transforms": [ + { + "name": "Sex & Gender", + "vtype": "DoubleCategorical", + "method": { + "groups": ["siteid", "scrsex", "genderid"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Gender", + "field": "genderid", + 
"missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Gender", + "field": "genderid", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Race & Ethnicity", + "vtype": "DoubleCategorical", + "method": { + "groups": ["siteid", "race", "ethnic"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Ethnicity", + "field": "ethnic", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Ethnicity", + "field": "ethnic", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Type II Diabetes", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "mhterm_dm2"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "mhterm_dm2", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Phenotype", + "field": "mhterm_dm2", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) diff --git a/modules/etl/transforms/__init__.py b/modules/etl/transforms/__init__.py new file mode 100644 index 00000000..7f804252 --- /dev/null +++ b/modules/etl/transforms/__init__.py 
@@ -0,0 +1,2 @@ +from .redcap_transform import RedcapTransform +from .module_transform import ModuleTransform diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py new file mode 100644 index 00000000..24f0a4db --- /dev/null +++ b/modules/etl/transforms/module_transform.py @@ -0,0 +1,297 @@ +# Library Modules +from typing import Any, Callable, Union, List, Dict, Tuple +from datetime import datetime +import logging, re +import modules.etl.vtypes as vtypes + +# Third-Party Modules +import pandas as pd + + +class ModuleTransform(object): + def __init__( + self: object, + config: Dict[str, Dict[str, Any]], + logging_config: Dict[str, str] = {}, + ) -> None: + # + # Logging + # + + # Logging Config Checks + self.logging_config = {} + self.logging_config["encoding"] = ( + logging_config["encoding"] if "encoding" in logging_config else "utf-8" + ) + self.logging_config["filename"] = ( + logging_config["filename"] + if "filename" in logging_config + else "REDCapETL.log" + ) + self.logging_config["level"] = ( + getattr(logging, logging_config["level"].upper()) + if "level" in logging_config + else logging.DEBUG + ) + + # Configure Logging + logging.basicConfig(**self.logging_config) + self.logger = logging.getLogger("VizModTransform") + + # + # References + # + + self.valid = True + + # + # Visualization Variables + # + + # Flag Indicating Whether to Use Strict Typing on Vtype Mapping + self.strict = config["strict"] if "strict" in config else True + + self.key = config["key"] if "key" in config else None + + self.transforms = config["transforms"] if "transforms" in config else None + + if self.transforms is None: + self.valid = False + raise ValueError( + f"ModuleTransform instantiation missing transforms argument" + ) + + # Normalize Transforms List Type, Check Validity, and Warn on Missing Attributes + self.transformList = ( + self.transforms if (type(self.transforms) == list) else [self.transforms] + ) + for transform in
enumerate(self.transformList): + self.valid = True if self._transformIsValid(transform) else False + if self.strict and not self.valid: + raise ValueError( + f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for details" + ) + + self.logger.info(f"{self.key}:Initialized") + + return + + def _transformIsValid(self: object, transform: Tuple[int, Dict[str, Any]]) -> bool: + """ + Transform validator + """ + index, transform = transform + valid = True + if "name" not in transform: + self.logger.error( + f"{self.key}:Transform at index {index} in transforms list missing name property" + ) + valid = False + if "vtype" not in transform: + self.logger.error( + f"{self.key}:Transform at index {index} in transforms list missing vtype property" + ) + valid = False + if "method" not in transform: + self.logger.error( + f"{self.key}:Transform at index {index} in transforms list missing method property" + ) + valid = False + if "accessors" not in transform: + self.logger.error( + f"{self.key}:Transform at index {index} in transforms list missing accessors property" + ) + valid = False + return valid + + def _setValueType( + self: object, + vtype: Any, + record: Dict[str, Any], + key: str, + accessor: Dict[str, Dict[str, str | Callable]], + ) -> Any: + """ + Element-wise type setting method. If value of + element is not the missing value, we cast the + value as the type defined for property in the + vtype. 
+ """ + for pname, _ptype in vtype.props: + if pname == key: + # Accessor Typing + ptype = _ptype if "astype" not in accessor else accessor["astype"] + if ptype != _ptype: + self.logger.warning( + f"Accessor `{pname}` with type `{ptype}` conflicts with VType definition requiring {_ptype}" + ) + if self.strict: + raise ValueError( + f"Accessor `{pname}` with type `{ptype}` conflicts with VType definition requiring {_ptype}" + ) + # Accessor Name + pvalue = record[accessor["field"]] + if pvalue != accessor["missing_value"]: + try: + pvalue = ptype(pvalue) + except (RuntimeError, TypeError) as error: + if self.strict: + self.logger.warning( + f"Unable to cast value {record[key]} to {ptype}" + ) + raise error + else: + self.logger.warning( + f"Unable to cast value {record[key]} to {ptype}" + ) + continue + + return pvalue + + def simpleTransform(self: object, df: pd.DataFrame) -> object: + """ + Performs a pd.DataFrame.groupby transform. The + df is first subset to the relevant fields. A + groupby function is then applied to the subset + to create a multi-index (hierarchy) by the + groups. An aggregate function is then applied + to the non-grouped column (e.g. count, sum). + + One transform for one VType. 
+ """ + transform = self.transformList.pop() + name, _vtype, method, accessors = ( + transform["name"], + transform["vtype"], + transform["method"], + transform["accessors"], + ) + vtype = getattr(vtypes, _vtype)() + + self.transformed = [] + if vtype.isvalid(df, accessors): + temp = df[ + list(set(accessor["field"] for key, accessor in accessors.items())) + ] + groups, value, func = method["groups"], method["value"], method["func"] + grouped = temp.groupby(groups, as_index=False) + transformed = getattr(grouped, func)() + + for record in transformed.to_dict("records"): + record = { + key: self._setValueType(vtype, record, key, accessor) + for key, accessor in accessors.items() + } + record = {"name": name} | record + self.transformed.append(record) + + else: + for error in vtype.validation_errors: + self.logger.warning(f"{error}") + + if len(vtype.validation_errors) == 0: + self.logger.info(f"{self.key}:Complete - simpleTransform") + + return self + + def compoundTransform(self: object, df: pd.DataFrame) -> object: + """ + For each transform, performs a pd.DataFrame.groupby + transform. The df is first subset to the relevant + fields. A groupby function is then applied to the + subset to create a multi-index (hierarchy) by the + groups. An aggregate function is then applied to the + non-grouped column (e.g. count, sum). + + All transforms are combined into a single flat + transform. Transforms must be identical VType, + e.g. [transformA, transformB, ...] 
+ """ + self.transformed = [] + + for transform in self.transformList: + name, vtype, method, accessors = ( + transform["name"], + getattr(vtypes, transform["vtype"])(), + transform["method"], + transform["accessors"], + ) + if vtype.isvalid(df, accessors): + temp = df[ + list(set(accessor["field"] for key, accessor in accessors.items())) + ] + groups, value, func = method["groups"], method["value"], method["func"] + grouped = temp.groupby(groups, as_index=False) + transformed = getattr(grouped, func)() + + for record in transformed.to_dict("records"): + record = { + key: self._setValueType(vtype, record, key, accessor) + for key, accessor in accessors.items() + } + record = {"name": name} | record + self.transformed.append(record) + + else: + for error in vtype.validation_errors: + self.logger.warning(f"{error}") + + if len(vtype.validation_errors) == 0: + self.logger.info(f"{self.key}:Complete - compoundTransform") + + return self + + def mixedTransform(self: object, df: pd.DataFrame) -> object: + """ + For each transform, performs a pd.DataFrame.groupby + transform. The df is first subset to the relevant + fields. A groupby function is then applied to the + subset to create a multi-index (hierarchy) by the + groups. An aggregate function is then applied to the + non-grouped column (e.g. count, sum). + + Transforms are kept distinct inserted into a dictionary, + e.g. {nameA: transformA, nameB: transformB, ...}. + Transforms can be heterogenous VTypes. 
+ """ + self.transformed = {} + + for transform in self.transformList: + name, vtype, method, accessors = ( + transform["name"], + getattr(vtypes, transform["vtype"])(), + transform["method"], + transform["accessors"], + ) + if vtype.isvalid(df, accessors): + temp = df[ + list(set(accessor["field"] for key, accessor in accessors.items())) + ] + groups, value, func = method["groups"], method["value"], method["func"] + grouped = temp.groupby(groups, as_index=False) + transformed = getattr(grouped, func)() + + subtransform = [] + for record in transformed.to_dict("records"): + record = { + key: self._setValueType(vtype, record, key, accessor) + for key, accessor in accessors.items() + } + record = {"name": name} | record + subtransform.append(record) + self.transformed[name] = subtransform + + else: + for error in vtype.validation_errors: + self.logger.warning(f"{error}") + + if len(vtype.validation_errors) == 0: + self.logger.info(f"{self.key}:Complete - mixedTransform") + + return self + + +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py new file mode 100644 index 00000000..a2073f77 --- /dev/null +++ b/modules/etl/transforms/redcap_transform.py @@ -0,0 +1,763 @@ +# Library Modules +from typing import Any, Callable, Union, List, Dict, Tuple +import re, os, csv, json, logging + +# Third Party Modules +from redcap import Project +import pandas as pd +import numpy as np + +class RedcapTransform(object): + def __init__(self, config: dict) -> None: + # + # Config + # + + # REDCap API Config + self.redcap_api_url = config["redcap_api_url"] + self.redcap_api_key = config["redcap_api_key"] + + # Set Transform Key + self.key = config["key"] if "key" in config else "redcap-transform" + + # Data Config + self.index_columns = ( + config["index_columns"] if "index_columns" in config else ["record_id"] + ) + + # REDCap Reports Config + self.reports_configs = config["reports"] if 
"reports" in config else [] + + # Report Merging + self.merge_transformed_reports = ( + config["merge_transformed_reports"] + if "merge_transformed_reports" in config + else [] + ) + + # Post Merge Transforms + self.post_merge_transforms = ( + config["post_merge_transforms"] if "post_merge_transforms" in config else [] + ) + + # Column Value Separator + self.multivalue_separator = ( + config["multivalue_separator"] if "multivalue_separator" in config else "|" + ) + + # CSV Float Format (Default: "%.2f") + self.csv_float_format = ( + config["csv_float_format"] if "csv_float_format" in config else "%.2f" + ) + + self.missing_value_generic = ( + config["missing_value_generic"] + if "missing_value_generic" in config + else "Value Unavailable" + ) + + # Logging Config + self.logging_config = ( + config["logging_config"] + if "logging_config" in config + else { + "encoding": "utf-8", + "filename": "REDCapETL.log", + "level": logging.DEBUG, + } + ) + + # Configure Logging + logging.basicConfig(**self.logging_config) + self.logger = logging.getLogger("RedcapTransform") + + # + # REDCap Parsing Variables + # + + # Regex Complex Field Parsers + self._field_rgx = {} + self._field_rgx["radio"] = re.compile(r"^[0-9\.]{1,17}") + self._field_rgx["checkbox"] = re.compile(r"^[0-9\.]{1,17}") + self._field_rgx["dropdown"] = re.compile(r"^[0-9\.]{1,17}") + self._field_rgx["yesno"] = re.compile(r"^[0-9\.]{1,17}") + self._field_rgx["text"] = re.compile( + r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}" + ) + self._field_rgx["descriptive"] = re.compile( + r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}" + ) + self._field_rgx["notes"] = re.compile( + r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}" + ) + self._field_rgx["file"] = None + self._field_rgx["signature"] = None + self._field_rgx["calc"] = None + + # General Parsing Variables + self.none_values = [ + np.nan, + pd.NaT, + None, + "nan", + "NaN", + "-", + "", + self.missing_value_generic, + ] + self.none_map = {key: 
self.missing_value_generic for key in self.none_values} + self.survey_instrument_map = { + "2": "Complete", + "1": "Unverified", + "0": "Incomplete", + "": self.missing_value_generic, + } + + self.logger.info(f"Initialized") + + # + # PyCap Initialization + # + + # Initialize PyCap Objects + self.logger.info(f"Retrieving REDCap project data") + self.project = Project(self.redcap_api_url, self.redcap_api_key) + self.metadata = self.project.export_metadata() + self.repeat_events_data = self.project.export_repeating_instruments_events() + + # + # Setup Reports & Apply Transforms + # + + # Internal Defaults + # - Key Assumptions for Transform Functions + # – Only Update if REDCap API Updates + self._reports_kwdargs = {} + self._reports_kwdargs["raw_or_label"] = "raw" + self._reports_kwdargs["raw_or_label_headers"] = "raw" + self._reports_kwdargs["export_checkbox_labels"] = False + self._reports_kwdargs["csv_delimiter"] = "\t" + + # Get & Structure Report + self.logger.info(f"Retrieving REDCap reports") + self.reports = {} + for report_name, report_kwdargs, transforms in self.reports_configs: + # Get Report + report_kwdargs = report_kwdargs | self._reports_kwdargs + report = self.project.export_report(**report_kwdargs) + # Structure Reports + self.reports[report_name] = { + "id": report_kwdargs["report_id"], + "report": report, + "df": pd.DataFrame(report), + "transforms": transforms, + "transformed": None, + "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), + } + + # Generate Transformed Report + self.logger.info(f"Applying REDCap report transforms") + for report_name, report_object in self.reports.items(): + self._apply_report_transforms(report_name) + + # Merge Reports + self.logger.info(f"Merging REDCap reports") + receiving_report_name, merge_steps = self.merge_transformed_reports + self.merged = self._merge_reports(receiving_report_name, merge_steps) + + # Apply Post-Merge Transforms + self.logger.info(f"Applying REDCap report post-merge 
transforms") + for transform, transform_kwdargs in self.post_merge_transforms: + self.merged = self.apply_transform( + self.merged, transform, transform_kwdargs + ) + + self.logger.info(f"REDCap transforms complete") + + return + + # + # Getters + # + + def get_report_id(self: object, report_name: str) -> str: + """ + Returns a str instance of the REDCap report ID. + """ + return self.reports[report_name]["id"] + + def get_report_pycap( + self: object, report_name: str + ) -> Union[List[Dict[str, Any]], str, pd.DataFrame]: + """ + Returns a PyCap Report object containing the report. + """ + return self.reports[report_name]["report"] + + def get_report_df(self: object, report_name: str) -> pd.DataFrame: + """ + Returns a pd.DataFrame instance containing the report. + """ + return self.reports[report_name]["df"] + + def get_report_transformed_df(self: object, report_name: str) -> pd.DataFrame: + """ + Returns a pd.DataFrame instance containing the report + with normalization transforms applied. + """ + return self.reports[report_name]["transformed"] + + def get_report_transforms( + self: object, report_name: str + ) -> List[Tuple[str, Dict[str, Any]]]: + """ + Returns a list of transforms that will be applied to + the report + """ + return self.reports[report_name]["transforms"] + + def get_report_annotations(self: object, report_name: str) -> List[Dict[str, Any]]: + """ + Returns a list of annotations generated from the + REDCap metadata API call. + """ + return self.reports[report_name]["annotations"] + + # + # Transform Applicator + # + + # Applies Declared Transforms to Reports + def _apply_report_transforms(self: object, report_name: str) -> None: + """ + Interal method that applies the transforms to each + report as an idempotent transform stack. 
+ """ + report = self.reports[report_name] + annotation = report["annotation"] + report["transformed"] = report["df"] + for transform in report["transforms"]: + transform_name, transform_kwdargs = transform + transform_kwdargs = transform_kwdargs | {"annotation": annotation} + report["transformed"] = self.apply_transform( + report["transformed"], transform_name, transform_kwdargs + ) + + return self + + def apply_transform( + self: object, + df: pd.DataFrame, + transform_name: str, + transform_kwdargs: Dict[str, Any] = {}, + ) -> pd.DataFrame: + return getattr(self, f"_{transform_name}")(df, **transform_kwdargs) + + # + # Transforms - Columns + # + + # + # Drop Columns + # + + def _drop_columns( + self: object, + df: pd.DataFrame, + columns: List[str] = [], + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + df = df.drop(columns=columns) + return df + + @classmethod + def drop_columns( + self: object, df: pd.DataFrame, columns: List[str] + ) -> pd.DataFrame: + """ + Drop columns from pd.DataFrame. + """ + return self._drop_columns(df=df, columns=columns) + + # + # Keep Columns + # + + def _keep_columns( + self: object, + df: pd.DataFrame, + columns: List[str] = [], + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = set(df.columns) - set( + self._resolve_columns_with_dataframe(df=df, columns=columns) + ) + df = df.drop(columns=columns) + return df + + @classmethod + def keep_columns( + self: object, df: pd.DataFrame, columns: List[str] + ) -> pd.DataFrame: + """ + Keep only selected columns in pd.DataFrame. 
+ """ + return self._keep_columns(df=df, columns=columns) + + # + # Transform - Append Column Prefix + # + + def _append_column_suffix( + self: object, + df: pd.DataFrame, + columns: List[str] = [], + suffix: str = "", + separator: str = "", + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + df[columns] = df[columns].rename( + mapper=lambda name: f"{name}{separator}{suffix}" + ) + return df + + @classmethod + def append_column_suffix( + self: object, + df: pd.DataFrame, + columns: List[str] = [], + suffix: str = "", + separator: str = "", + ) -> pd.DataFrame: + """ + Append a suffix to columns of pd.DataFrame. Note: If no + columns parameter is provided, the suffix is applied every + column. If no suffix is provided, the column names remain + unchanged. A separator argument allows for the expansion + of column names by one or more characters, e.g. "_" for + snakecase. + """ + return self._append_column_suffix( + df=df, columns=columns, suffix=suffix, separator=separator + ) + + # + # Transform - Prepend Column Prefix + # + + def _prepend_column_prefix( + self: object, + df: pd.DataFrame, + columns: List[str] = [], + prefix: str = "", + separator: str = "", + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + df[columns] = df[columns].rename( + mapper=lambda name: f"{prefix}{separator}{name}" + ) + return df + + @classmethod + def prepend_column_prefix( + self: object, + df: pd.DataFrame, + columns: List[str] = [], + prefix: str = "", + separator: str = "", + ) -> pd.DataFrame: + """ + Append a prefix to columns of pd.DataFrame. Note: If no + columns parameter is provided, the prefix is applied every + column. If no prefix is provided, the column names remain + unchanged. A separator argument allows for the expansion + of column names by one or more characters, e.g. "_" for + snakecase. 
+ """ + return self._prepend_column_prefix( + df=df, columns=columns, prefix=prefix, separator=separator + ) + + # + # Transforms - Remap Values by Columns + # + + def _remap_values_by_columns( + self: object, + df: pd.DataFrame, + columns: List[str], + value_map: Dict[str, Any] = {}, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + # Resolve Mappable Fields and Available Value Maps + columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + + if len(value_map) > 0: + mappable_fields = [ + {"name": column, "options": value_map} for column in columns + ] + else: + mappable_fields = [ + field + for field in annotation + if field["options"] is not None and field["name"] in columns + ] + + for mappable_field in mappable_fields: + column, value_map = mappable_field["name"], mappable_field["options"] + for i, value in enumerate(df[column]): + subvalues = [ + subvalue.strip() + for subvalue in str(value).split(",") + if len(subvalue) > 0 + ] + df[column][i] = self.multivalue_separator.join( + [ + value_map[subvalue] + for subvalue in subvalues + if subvalue in value_map.keys() + ] + ) + + return df + + @classmethod + def remap_values_by_columns( + self: object, + df: pd.DataFrame, + columns: List[str], + value_map: Dict[str, Any] = {}, + ) -> pd.DataFrame: + """ + Remap values by column using a list of annotations. + Each annotation is a dictionary containing a the + following keys: "name", "type", and "options". Key + to this method are then "name" and "options" entries. + The value of the "name" corresponds to the + pd.DataFrame column name. The value of the"options" + entry is a value_map object generated from the + REDCapo metadata API request: + + annotation = { + "name": field["field_name"], + "type": field["field_type"], + "options": field["field_options"] + } + + If multiple values are found in the field, they will + be mapped with a separator. The default separator is + a pipe (i.e. "|"). 
+ + Returns a transformed pd.DataFrame + """ + return self._remap_values_by_columns( + df=df, columns=columns, value_map=value_map + ) + + # + # Transform - Map Missing Values By Columns + # + + def _map_missing_values_by_columns( + self: object, + df: pd.DataFrame, + columns: List[str], + missing_value: Any = None, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + missing_value = ( + missing_value if missing_value is not None else self.missing_value_generic + ) + for column in columns: + for i, value in enumerate(df[column]): + if (len(str(value)) == 0) or (value in self.none_map.keys()): + df[column][i] = missing_value + else: + continue + + return df + + @classmethod + def map_missing_values_by_columns( + self: object, df: pd.DataFrame, columns: List[str], missing_value: Any + ) -> pd.DataFrame: + """ + Replace 0-length values or values with keys in + self.none_map with self.missing_value_generic. + """ + return self._map_missing_values_by_columns( + df=df, columns=columns, missing_value=missing_value + ) + + # + # Transforms - Rows + # + + # + # Drop Rows + # + + def _drop_rows( + self: object, + df: pd.DataFrame, + columns: List[str] = [], + condition: Callable = lambda column: column == "", + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] + return df + + @classmethod + def drop_rows( + self: object, + df: pd.DataFrame, + columns: List[str], + condition: Callable = lambda column: column == "", + ) -> pd.DataFrame: + """ + Drop rows from pd.DataFrame. 
+ """ + return self._drop_rows(df=df, columns=columns) + + # + # Transforms - Aggregation + # + + # + # Transforms - Aggregate Repeat Instruments by Index + # + + def _aggregate_repeat_instrument_column_by_index( + self: object, + df: pd.DataFrame, + aggregator: str = "max", + dtype: Callable = float, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + new_columns = df["redcap_repeat_instrument"].unique() + pivot = pd.pivot_table( + df, + index=self.index_columns, + columns=["redcap_repeat_instrument"], + values="redcap_repeat_instance", + aggfunc=aggregator, + fill_value=self.missing_value_generic, + ) + df = df.merge(pivot, how="inner", on=self.index_columns) + df = df.drop_duplicates(self.index_columns, keep="first") + for column in new_columns: + df[column] = df[column].astype(dtype) + return df + + @classmethod + def aggregate_repeat_instrument_by_index( + self: object, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float + ) -> pd.DataFrame: + """ + Pre-processing REDCap repeat_instrument so each instrument + has its own column and the value. The value is computed + using an aggregation function applied to the repeat_instance + field. + """ + return self._aggregate_repeat_instrument_column_by_index( + df=df, aggregator=aggregator, dtype=dtype + ) + + # + # Report Merging + # + + def _merge_reports( + self: object, + receiving_report_name: str, + merge_steps: List[Tuple[str, Dict[str, Any]]], + ) -> pd.DataFrame: + """ + Performs N - 1 merge transforms on N reports. + """ + + df_receiving_report = self.reports[receiving_report_name]["transformed"] + + if len(merge_steps) > 0: + for providing_report_name, merge_kwdargs in merge_steps: + df_providing_report = self.reports[providing_report_name]["transformed"] + df_receiving_report = df_receiving_report.merge( + df_providing_report, **merge_kwdargs + ) + else: + self.logger.warning( + f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." 
+ ) + + return df_receiving_report + + # + # Utilities + # + + # Transform Prelude - Get Applicable Transform Columns + def _resolve_columns_with_dataframe( + self: object, df: pd.DataFrame, columns: List[str] + ) -> List[str]: + """ + Internal utility function. Uses set logic to ensure + requested columns are available within the target + pd.DataFrame. + """ + available_columns, requested_columns = set(df.columns), set(columns) + resolved_columns = [] + + if len(requested_columns) == 0: + self.logger.warn( + f"Unexpected Transform – columns parameter has no values. Defaulting to df.columns" + ) + resolved_columns = [*available_columns] + elif len(available_columns & requested_columns) == 0: + self.logger.warn( + f"Unexpected Transform – none of the values in the columns parameter were found in df.columns. Defaulting to df.columns" + ) + resolved_columns = [*available_columns] + elif len(requested_columns - available_columns) > 0: + self.logger.warn( + f"Unexpected Transform – df.columns missing values present in columns parameter: {', '.join([*requested_columns - available_columns])}. Continuing with union." 
+ ) + resolved_columns = [*(available_columns & requested_columns)] + else: + resolved_columns = [*requested_columns] + + return resolved_columns + + # Extract REDCap Type Metadata + def _get_redcap_type_metadata( + self: object, df: pd.DataFrame + ) -> List[Dict[str, Any]]: + """ + Extracts REDCap field name, type, and options (the + metadata) for each column in the target pd.DataFrame + """ + + # REDCap Internal Variable Metadata + metadata = [ + {"name": "redcap_data_access_group", "type": "text", "options": None}, + {"name": "redcap_repeat_instrument", "type": "text", "options": None}, + {"name": "redcap_repeat_instance", "type": "number", "options": None}, + ] + + field_types = set(field["field_type"] for field in self.metadata) + complex_types = {"dropdown", "radio", "checkbox"} + binary_types = {"yesno"} + text_types = {"text"} + skip_types = {"file", "calc", "descriptive", "notes"} + + # Get Column Metadata + columns = df.columns.tolist() + for field in sorted(self.metadata, key=lambda f: f["field_name"]): + if field["field_name"] in columns: + field_type = field["field_type"] + options = {} + if field_type in complex_types: + rgx = self._field_rgx[field_type] + for option in field["select_choices_or_calculations"].split("|"): + k, v = ( + option.split(",")[0], + (",".join(option.split(",")[1:])).strip(), + ) + k = int(k) if re.match(rgx, k) else str(k) + v = int(v) if re.match(rgx, v) else str(v) + options[str(k)] = v + metadata.append( + { + "name": field["field_name"], + "type": field["field_type"], + "options": options | self.none_map, + } + ) + elif field_type in binary_types: + metadata.append( + { + "name": field["field_name"], + "type": field["field_type"], + "options": {"1": "Yes", "0": "No"} | self.none_map, + } + ) + elif field_type in text_types: + metadata.append( + { + "name": field["field_name"], + "type": field["field_type"], + "options": None, + } + ) + elif field_type in skip_types: + metadata.append( + { + "name": field["field_name"], + 
"type": field["field_type"], + "options": None, + } + ) + else: + continue + + return metadata + + # + # Exports + # + + # Export Untransformed (Raw) Reports + def export_raw( + self: object, path: str = "", separator: str = "\t", filetype: str = ".tsv" + ) -> object: + for report_name, report_object in self.reports.items(): + filename = f"{report_name}_raw{filetype}" + filepath = os.path.join(path, filename) + transformed = report_object["df"] + transformed.to_csv( + filepath, + sep=separator, + quoting=csv.QUOTE_NONNUMERIC, + float_format=self.csv_float_format, + ) + return self + + # Export Transformed Reports + def export_transformed( + self: object, path: str = "", separator: str = "\t", filetype: str = ".tsv" + ) -> object: + for report_name, report_object in self.reports.items(): + filename = f"{report_name}_transformed{filetype}" + filepath = os.path.join(path, filename) + transformed = report_object["transformed"] + transformed.to_csv( + filepath, + sep=separator, + quoting=csv.QUOTE_NONNUMERIC, + float_format=self.csv_float_format, + ) + return self + + # Export Merged Transforms + def export_merged_transformed( + self: object, path: str = "", separator: str = "\t", filetype: str = ".tsv" + ) -> object: + filename = f"transformed-merged_redcap-extract{filetype}" + filepath = os.path.join(path, filename) + self.merged.to_csv( + filepath, + sep=separator, + quoting=csv.QUOTE_NONNUMERIC, + float_format=self.csv_float_format, + ) + return self + + +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/vtypes/__init__.py b/modules/etl/vtypes/__init__.py new file mode 100644 index 00000000..c1d0804b --- /dev/null +++ b/modules/etl/vtypes/__init__.py @@ -0,0 +1,11 @@ +from .vtype import VType +from .categorical import SingleCategorical, DoubleCategorical +from .discrete import SingleDiscrete, DoubleDiscrete +from .continuous import SingleContinuous, DoubleContinuous +from .timeseries import ( + SingleTimeseries, + DoubleDiscreteTimeseries, + 
DoubleContinuousTimeseries, +) +from .compound import Compound +from .mixed import Mixed diff --git a/modules/etl/vtypes/categorical.py b/modules/etl/vtypes/categorical.py new file mode 100644 index 00000000..1f5f7870 --- /dev/null +++ b/modules/etl/vtypes/categorical.py @@ -0,0 +1,36 @@ +from .vtype import VType + + +class SingleCategorical(VType): + def __init__(self: object) -> None: + super().__init__( + "SingleCategorical", + [ + ("filterby", str), + ("group", str), + ("color", str), + ("value", int), + ], + str, + ) + + +class DoubleCategorical(VType): + def __init__(self: object) -> None: + super().__init__( + "DoubleCategorical", + [ + ("filterby", str), + ("group", str), + ("subgroup", str), + ("color", str), + ("value", int), + ], + str, + ) + + +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/vtypes/compound.py b/modules/etl/vtypes/compound.py new file mode 100644 index 00000000..4ca25167 --- /dev/null +++ b/modules/etl/vtypes/compound.py @@ -0,0 +1,50 @@ +from .vtype import VType +from .categorical import SingleCategorical, DoubleCategorical +from .discrete import SingleDiscrete, DoubleDiscrete +from .continuous import SingleContinuous, DoubleContinuous +from .timeseries import ( + SingleTimeseries, + DoubleDiscreteTimeseries, + DoubleContinuousTimeseries, +) +from typing import Tuple, List, Dict, Callable, Any +import pandas as pd + + +class Compound(VType): + def __init__(self: object) -> None: + raise NotImplementedError + super().__init__( + "Compound", + [ + SingleCategorical, + DoubleCategorical, + SingleDiscrete, + DoubleDiscrete, + SingleContinuous, + DoubleContinuous, + SingleTimeseries, + DoubleDiscreteTimeseries, + DoubleContinuousTimeseries, + ], + str, + ) + + def isvalid( + self: object, dfs: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + ) -> bool: + """ + Extends the VType.isvalid method to operate on a list + of pd.DataFrames and accessors. 
+ """ + valid = True + for accessors in accessorsList: + if not super(Compound, self).isvalid(dfs, accessors): + self.validation_errors.append( + f"VType {self.name.title()} has invalid accessors. See additional details above." + ) + valid = False + else: + continue + + return valid diff --git a/modules/etl/vtypes/continuous.py b/modules/etl/vtypes/continuous.py new file mode 100644 index 00000000..ce80a011 --- /dev/null +++ b/modules/etl/vtypes/continuous.py @@ -0,0 +1,31 @@ +from .vtype import VType + + +class SingleContinuous(VType): + def __init__(self: object) -> None: + super().__init__( + "SingleContinuous", + [("filterby", str), ("group", str), ("color", str), ("x", float)], + float, + ) + + +class DoubleContinuous(VType): + def __init__(self: object) -> None: + super().__init__( + "DoubleContinuous", + [ + ("filterby", str), + ("group", str), + ("color", str), + ("x", float), + ("y", float), + ], + float, + ) + + +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/vtypes/discrete.py b/modules/etl/vtypes/discrete.py new file mode 100644 index 00000000..429d81bc --- /dev/null +++ b/modules/etl/vtypes/discrete.py @@ -0,0 +1,25 @@ +from .vtype import VType + + +class SingleDiscrete(VType): + def __init__(self: object) -> None: + super().__init__( + "SingleDiscrete", + [("filterby", str), ("group", str), ("color", str), ("x", int)], + int, + ) + + +class DoubleDiscrete(VType): + def __init__(self: object) -> None: + super().__init__( + "DoubleDiscrete", + [ + ("filterby", str), + ("group", str), + ("color", str), + ("x", int), + ("y", int), + ], + int, + ) diff --git a/modules/etl/vtypes/mixed.py b/modules/etl/vtypes/mixed.py new file mode 100644 index 00000000..35a1b842 --- /dev/null +++ b/modules/etl/vtypes/mixed.py @@ -0,0 +1,56 @@ +from typing import Any, Callable, Union, List, Dict, Tuple +from .vtype import VType +from .categorical import SingleCategorical, DoubleCategorical +from .discrete import SingleDiscrete, DoubleDiscrete +from
.continuous import SingleContinuous, DoubleContinuous +from .timeseries import ( + SingleTimeseries, + DoubleDiscreteTimeseries, + DoubleContinuousTimeseries, +) +from .compound import Compound +import pandas as pd + + +class Mixed(VType): + def __init__(self: object) -> None: + raise NotImplementedError + super().__init__( + "Mixed", + [ + SingleCategorical, + DoubleCategorical, + SingleDiscrete, + DoubleDiscrete, + SingleContinuous, + DoubleContinuous, + SingleTimeseries, + DoubleDiscreteTimeseries, + DoubleContinuousTimeseries, + Compound, + ], + str, + ) + + def isvalid( + self: object, dfs: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + ) -> bool: + """ + Extends the VType.isvalid method to operate on a list + of pd.DataFrames and accessors. + """ + valid = True + for accessors in accessorsList: + if not super(Compound, self).isvalid(df, accessors): + self.validation_errors.append( + f"VType {self.name.title()} has invalid accessors. See additional details above." + ) + valid = False + else: + continue + + +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py new file mode 100644 index 00000000..7bb4b3fa --- /dev/null +++ b/modules/etl/vtypes/timeseries.py @@ -0,0 +1,48 @@ +from .vtype import VType +import pandas as pd +from datetime import datetime + + +class SingleTimeseries(VType): + def __init__(self: object) -> None: + super().__init__( + "SingleTimeseries", + [("filterby", str), ("subgroup", str), ("color", str), ("x", datetime)], + pd._libs.tslibs.nattype.NaTType, + ) + + +class DoubleDiscreteTimeseries(VType): + def __init__(self: object) -> None: + super().__init__( + "DoubleDiscreteTimeseries", + [ + ("filterby", str), + ("subgroup", str), + ("color", str), + ("x", datetime), + ("y", int), + ], + pd._libs.tslibs.nattype.NaTType, + ) + + +class DoubleContinuousTimeseries(VType): + def __init__(self: object) -> None: + super().__init__( + 
"DoubleContinuousTimeseries", + [ + ("filterby", str), + ("subgroup", str), + ("color", str), + ("x", datetime), + ("y", float), + ], + pd._libs.tslibs.nattype.NaTType, + ) + + +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py new file mode 100644 index 00000000..e7775ed0 --- /dev/null +++ b/modules/etl/vtypes/vtype.py @@ -0,0 +1,46 @@ +from typing import Any, Callable, Union, List, Dict, Tuple +from datetime import datetime +import pandas as pd + + +class VType(object): + def __init__( + self: object, + name: str, + props: List[Tuple[str, Callable]], + missing_value: Callable, + ) -> None: + self.name = name + self.props = props + self.missing_value = missing_value + self.validation_errors = [] + + def __str__(self): + return f"{self.__dict__}" + + def isvalid( + self: object, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]] + ) -> bool: + columns = df.columns + for pname, ptype in self.props: + if pname in accessors.keys(): + column = accessors[pname]["field"] + if column not in columns: + self.validation_errors.append( + f"VType {self.name.title()} pd.DataFrame argument (df) is missing column defined in accessors argument, {column}" + ) + return False + else: + continue + else: + self.validation_errors.append( + f"VType {self.name.title()} accessors argument is missing required property, {pname}" + ) + return False + return True + + +if __name__ == "__main__": + pass +else: + pass diff --git a/poetry.lock b/poetry.lock index 5b50c575..cacb851b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -17,25 +17,25 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "3.7.1" +version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = 
"sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] [[package]] name = "appnope" @@ -51,23 +51,24 @@ files = [ [[package]] name = "argon2-cffi" -version = "21.3.0" -description = "The secure Argon2 password hashing algorithm." 
+version = "23.1.0" +description = "Argon2 for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, ] [package.dependencies] argon2-cffi-bindings = "*" [package.extras] -dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] -docs = ["furo", "sphinx", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] [[package]] name = "argon2-cffi-bindings" @@ -109,29 +110,34 @@ tests = ["pytest"] [[package]] name = "arrow" -version = "1.2.3" +version = "1.3.0" description = "Better dates & times for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, ] [package.dependencies] python-dateutil = ">=2.7.0" 
+types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] [[package]] name = "art" -version = "6.0" +version = "6.1" description = "ASCII Art Library For Python" category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "art-6.0-py3-none-any.whl", hash = "sha256:882e6b5a4c6045c6dace31147c0b8522c8ca70177a5922f172733d116123a187"}, - {file = "art-6.0.tar.gz", hash = "sha256:884ef1c10e900387cce97789e8668125720cbf4459206641b91fc298c859cda9"}, + {file = "art-6.1-py3-none-any.whl", hash = "sha256:159819c418001467f8d79616fa0814277deac97c8a363d1eb3e7c0a31526bfc3"}, + {file = "art-6.1.tar.gz", hash = "sha256:6ab3031e3b7710039e73497b0e750cadfe04d4c1279ce3a123500dbafb9e1b64"}, ] [package.extras] @@ -139,14 +145,14 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture [[package]] name = "astroid" -version = "2.15.6" +version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, - {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, + {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, + {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, ] [package.dependencies] @@ -159,18 +165,18 @@ wrapt = [ [[package]] name = "asttokens" -version = "2.2.1" +version = "2.4.0" description = "Annotate AST trees with source code positions" category = "dev" optional = false python-versions = "*" files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, + {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"}, + {file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"}, ] [package.dependencies] -six = "*" +six = ">=1.12.0" [package.extras] test = ["astroid", "pytest"] @@ -190,6 +196,18 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = 
"sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + [[package]] name = "attrs" version = "23.1.0" @@ -211,18 +229,21 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "babel" -version = "2.12.1" +version = "2.13.1" description = "Internationalization utilities" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, + {file = "Babel-2.13.1-py3-none-any.whl", hash = "sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed"}, + {file = "Babel-2.13.1.tar.gz", hash = "sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900"}, ] [package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} +setuptools = {version = "*", markers = "python_version >= \"3.12\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "backcall" @@ -292,34 +313,30 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.7.0" +version = "23.10.1" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = 
"black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"}, + {file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"}, + {file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"}, + {file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = 
"sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"}, + {file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"}, + {file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"}, + {file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"}, + {file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"}, + {file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"}, + {file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"}, + {file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"}, + {file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"}, + {file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"}, + {file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"}, + {file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"}, ] [package.dependencies] @@ -329,7 +346,7 @@ 
packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -339,14 +356,14 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bleach" -version = "6.0.0" +version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, - {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"}, + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, ] [package.dependencies] @@ -354,18 +371,30 @@ six = ">=1.9.0" webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.2)"] +css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "blinker" -version = "1.6.2" +version = "1.6.3" description = "Fast, simple object-to-object and broadcast signaling" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, - {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, + {file = "blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa"}, + {file = "blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d"}, +] + +[[package]] 
+name = "cachelib" +version = "0.9.0" +description = "A collection of cache libraries in the same API interface." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, + {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, ] [[package]] @@ -382,76 +411,64 @@ files = [ [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = 
"cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = 
"sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = 
"cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -459,99 +476,114 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, + {file = 
"charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = 
"sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, + {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, + {file = 
"charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, + {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, + {file = 
"charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, + {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, + {file = 
"charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, + {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, + {file = 
"charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, + {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, + {file = 
"charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, + {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, + {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, ] [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -571,18 +603,18 @@ files = [ [[package]] name = "comm" -version = "0.1.3" +version = "0.1.4" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"}, - {file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"}, + {file = "comm-0.1.4-py3-none-any.whl", hash = "sha256:6d52794cba11b36ed9860999cd10fd02d6b2eac177068fdd585e1e2f8a96e67a"}, + {file = "comm-0.1.4.tar.gz", hash = "sha256:354e40a59c9dd6db50c5cc6b4acc887d82e9603787f83b68c01a80a923984d15"}, ] [package.dependencies] -traitlets = ">=5.3" +traitlets = ">=4" [package.extras] lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] @@ -591,72 +623,64 @@ typing = ["mypy (>=0.990)"] [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.2" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = 
"coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = 
"sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = 
"coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = 
"coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = 
"coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, ] [package.dependencies] @@ -686,30 +710,30 @@ requests = ">=2.28,<3.0" [[package]] name = "debugpy" -version = "1.6.7" +version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"}, - {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"}, - {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = "sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"}, - {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"}, - {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"}, - {file = 
"debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"}, - {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"}, - {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"}, - {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"}, - {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"}, - {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"}, - {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"}, - {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"}, - {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"}, - {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"}, - {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"}, - {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = "sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"}, - {file = "debugpy-1.6.7.zip", hash = "sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"}, + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = 
"debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = 
"sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, ] [[package]] @@ -796,14 +820,14 @@ files = [ [[package]] name = "email-validator" -version = "2.0.0.post2" +version = "2.1.0.post1" description = "A robust email address syntax and deliverability validation library." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "email_validator-2.0.0.post2-py3-none-any.whl", hash = "sha256:2466ba57cda361fb7309fd3d5a225723c788ca4bbad32a0ebd5373b99730285c"}, - {file = "email_validator-2.0.0.post2.tar.gz", hash = "sha256:1ff6e86044200c56ae23595695c54e9614f4a9551e0e393614f764860b3d7900"}, + {file = "email_validator-2.1.0.post1-py3-none-any.whl", hash = "sha256:c973053efbeddfef924dc0bd93f6e77a1ea7ee0fce935aea7103c7a3d6d2d637"}, + {file = "email_validator-2.1.0.post1.tar.gz", hash = "sha256:a4b0bd1cf55f073b924258d19321b1f3aa74b4b5a71a42c305575dba920e1a44"}, ] [package.dependencies] @@ -812,14 +836,14 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = 
"exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -827,18 +851,18 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "1.2.0" +version = "2.0.0" description = "Get the currently executing AST node of a frame, and other information" category = "dev" optional = false python-versions = "*" files = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, + {file = "executing-2.0.0-py2.py3-none-any.whl", hash = "sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657"}, + {file = "executing-2.0.0.tar.gz", hash = "sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"}, ] [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "faker" @@ -857,14 +881,14 @@ python-dateutil = ">=2.4" [[package]] name = "fastjsonschema" -version = "2.18.0" +version = "2.18.1" description = "Fastest Python implementation of JSON schema" category = "dev" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.18.0-py3-none-any.whl", hash = "sha256:128039912a11a807068a7c87d0da36660afbfd7202780db26c4aa7153cfdc799"}, - {file = "fastjsonschema-2.18.0.tar.gz", hash = "sha256:e820349dd16f806e4bd1467a138dced9def4bc7d6213a34295272a6cac95b5bd"}, + {file = "fastjsonschema-2.18.1-py3-none-any.whl", hash = "sha256:aec6a19e9f66e9810ab371cc913ad5f4e9e479b63a7072a2cd060a9369e329a8"}, + {file = "fastjsonschema-2.18.1.tar.gz", hash = "sha256:06dc8680d937628e993fa0cd278f196d20449a1adc087640710846b324d422ea"}, ] [package.extras] @@ -889,23 +913,22 @@ pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "flask" 
-version = "2.3.2" +version = "2.3.3" description = "A simple framework for building complex web applications." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Flask-2.3.2-py3-none-any.whl", hash = "sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0"}, - {file = "Flask-2.3.2.tar.gz", hash = "sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef"}, + {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, + {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, ] [package.dependencies] blinker = ">=1.6.2" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" -Werkzeug = ">=2.3.3" +Werkzeug = ">=2.3.7" [package.extras] async = ["asgiref (>=3.2)"] @@ -927,6 +950,22 @@ files = [ bcrypt = ">=3.1.1" Flask = "*" +[[package]] +name = "flask-caching" +version = "2.1.0" +description = "Adds caching support to Flask applications." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Flask-Caching-2.1.0.tar.gz", hash = "sha256:b7500c145135836a952e3de3a80881d9654e327a29c852c9265607f5c449235c"}, + {file = "Flask_Caching-2.1.0-py3-none-any.whl", hash = "sha256:f02645a629a8c89800d96dc8f690a574a0d49dcd66c7536badc6d362ba46b716"}, +] + +[package.dependencies] +cachelib = ">=0.9.0,<0.10.0" +Flask = "*" + [[package]] name = "flask-cors" version = "4.0.0" @@ -944,22 +983,23 @@ Flask = ">=0.9" [[package]] name = "flask-restx" -version = "1.1.0" +version = "1.2.0" description = "Fully featured framework for fast, easy and documented API development with Flask" category = "main" optional = false python-versions = "*" files = [ - {file = "flask-restx-1.1.0.tar.gz", hash = "sha256:62b6b6c9de65e5960cf4f8b35e1bd3eca6998838a01b2f71e2a9d4c14a4ccd14"}, - {file = "flask_restx-1.1.0-py2.py3-none-any.whl", hash = "sha256:eaff65f6edd400ee00b40475496d61a4937b28371dfed039d3fd180d206fee4e"}, + {file = "flask-restx-1.2.0.tar.gz", hash = "sha256:9a5338b108c57fbed1d24d5d53fe98442b2be7ffa2ff3291305af7a613ce6fc0"}, + {file = "flask_restx-1.2.0-py2.py3-none-any.whl", hash = "sha256:be3837378c51893adf94f632b6e458c4c2d54c6bc9f4210206ebc2fb1a70ada9"}, ] [package.dependencies] aniso8601 = ">=0.82" -Flask = ">=0.8,<2.0.0 || >2.0.0" -jsonschema = "*" +Flask = ">=0.8,<2.0.0 || >2.0.0,<3.0.0" +importlib-resources = "*" +jsonschema = "<=4.17.3" pytz = "*" -werkzeug = "!=2.0.0" +werkzeug = "<2.0.0 || >2.0.0,<3.0.0" [package.extras] dev = ["Faker (==2.0.0)", "black", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.2.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "tox", "twine (==3.8.0)", "tzlocal"] @@ -968,19 +1008,19 @@ test = ["Faker (==2.0.0)", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pyt [[package]] name = "flask-sqlalchemy" -version = "3.0.5" +version = "3.1.1" description = "Add 
SQLAlchemy support to your Flask application." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "flask_sqlalchemy-3.0.5-py3-none-any.whl", hash = "sha256:cabb6600ddd819a9f859f36515bb1bd8e7dbf30206cc679d2b081dff9e383283"}, - {file = "flask_sqlalchemy-3.0.5.tar.gz", hash = "sha256:c5765e58ca145401b52106c0f46178569243c5da25556be2c231ecc60867c5b1"}, + {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, + {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, ] [package.dependencies] flask = ">=2.2.5" -sqlalchemy = ">=1.4.18" +sqlalchemy = ">=2.0.16" [[package]] name = "fqdn" @@ -1029,80 +1069,73 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "greenlet" -version = "2.0.2" +version = "3.0.1" description = "Lightweight in-process concurrent programming" category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, - {file = 
"greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, 
- {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = 
"greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, -] - -[package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, + {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, + {file = 
"greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, + {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, + {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, + {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, + {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, + {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, + {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, + {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, + {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, + {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, + {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80ac992f25d10aaebe1ee15df45ca0d7571d0f70b645c08ec68733fb7a020206"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:337322096d92808f76ad26061a8f5fccb22b0809bea39212cd6c406f6a7060d2"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9934adbd0f6e476f0ecff3c94626529f344f57b38c9a541f87098710b18af0a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dc4d815b794fd8868c4d67602692c21bf5293a75e4b607bb92a11e821e2b859a"}, + {file = "greenlet-3.0.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41bdeeb552d814bcd7fb52172b304898a35818107cc8778b5101423c9017b3de"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6e6061bf1e9565c29002e3c601cf68569c450be7fc3f7336671af7ddb4657166"}, + {file = "greenlet-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fa24255ae3c0ab67e613556375a4341af04a084bd58764731972bcbc8baeba36"}, + {file = "greenlet-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:b489c36d1327868d207002391f662a1d163bdc8daf10ab2e5f6e41b9b96de3b1"}, + {file = "greenlet-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f33f3258aae89da191c6ebaa3bc517c6c4cbc9b9f689e5d8452f7aedbb913fa8"}, + {file = "greenlet-3.0.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d2905ce1df400360463c772b55d8e2518d0e488a87cdea13dd2c71dcb2a1fa16"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a02d259510b3630f330c86557331a3b0e0c79dac3d166e449a39363beaae174"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55d62807f1c5a1682075c62436702aaba941daa316e9161e4b6ccebbbf38bda3"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fcc780ae8edbb1d050d920ab44790201f027d59fdbd21362340a85c79066a74"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eddd98afc726f8aee1948858aed9e6feeb1758889dfd869072d4465973f6bfd"}, + {file = "greenlet-3.0.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eabe7090db68c981fca689299c2d116400b553f4b713266b130cfc9e2aa9c5a9"}, + {file = "greenlet-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f2f6d303f3dee132b322a14cd8765287b8f86cdc10d2cb6a6fae234ea488888e"}, + {file = 
"greenlet-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d923ff276f1c1f9680d32832f8d6c040fe9306cbfb5d161b0911e9634be9ef0a"}, + {file = "greenlet-3.0.1-cp38-cp38-win32.whl", hash = "sha256:0b6f9f8ca7093fd4433472fd99b5650f8a26dcd8ba410e14094c1e44cd3ceddd"}, + {file = "greenlet-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:990066bff27c4fcf3b69382b86f4c99b3652bab2a7e685d968cd4d0cfc6f67c6"}, + {file = "greenlet-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ce85c43ae54845272f6f9cd8320d034d7a946e9773c693b27d620edec825e376"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89ee2e967bd7ff85d84a2de09df10e021c9b38c7d91dead95b406ed6350c6997"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87c8ceb0cf8a5a51b8008b643844b7f4a8264a2c13fcbcd8a8316161725383fe"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6a8c9d4f8692917a3dc7eb25a6fb337bff86909febe2f793ec1928cd97bedfc"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fbc5b8f3dfe24784cee8ce0be3da2d8a79e46a276593db6868382d9c50d97b1"}, + {file = "greenlet-3.0.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85d2b77e7c9382f004b41d9c72c85537fac834fb141b0296942d52bf03fe4a3d"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:696d8e7d82398e810f2b3622b24e87906763b6ebfd90e361e88eb85b0e554dc8"}, + {file = "greenlet-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:329c5a2e5a0ee942f2992c5e3ff40be03e75f745f48847f118a3cfece7a28546"}, + {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, + {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, + {file = "greenlet-3.0.1.tar.gz", hash = 
"sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, +] + +[package.extras] +docs = ["Sphinx"] test = ["objgraph", "psutil"] [[package]] @@ -1121,7 +1154,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1139,22 +1172,19 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-resources" -version = "6.0.0" +version = "6.1.0" description = "Read resources from Python packages" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"}, - {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"}, + {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, + {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, ] -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -1170,14 
+1200,14 @@ files = [ [[package]] name = "ipykernel" -version = "6.25.0" +version = "6.26.0" description = "IPython Kernel for Jupyter" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.25.0-py3-none-any.whl", hash = "sha256:f0042e867ac3f6bca1679e6a88cbd6a58ed93a44f9d0866aecde6efe8de76659"}, - {file = "ipykernel-6.25.0.tar.gz", hash = "sha256:e342ce84712861be4b248c4a73472be4702c1b0dd77448bfd6bcfb3af9d5ddf9"}, + {file = "ipykernel-6.26.0-py3-none-any.whl", hash = "sha256:3ba3dc97424b87b31bb46586b5167b3161b32d7820b9201a9e698c71e271602c"}, + {file = "ipykernel-6.26.0.tar.gz", hash = "sha256:553856658eb8430bbe9653ea041a41bff63e9606fc4628873fc92a6cf3abd404"}, ] [package.dependencies] @@ -1204,14 +1234,14 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.12.2" +version = "8.16.1" description = "IPython: Productive Interactive Computing" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "ipython-8.12.2-py3-none-any.whl", hash = "sha256:ea8801f15dfe4ffb76dea1b09b847430ffd70d827b41735c64a0638a04103bfc"}, - {file = "ipython-8.12.2.tar.gz", hash = "sha256:c7b80eb7f5a855a88efc971fda506ff7a91c280b42cdae26643e0f601ea281ea"}, + {file = "ipython-8.16.1-py3-none-any.whl", hash = "sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e"}, + {file = "ipython-8.16.1.tar.gz", hash = "sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"}, ] [package.dependencies] @@ -1219,6 +1249,7 @@ appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} @@ -1227,12 +1258,11 @@ prompt-toolkit = 
">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -1256,22 +1286,22 @@ files = [ [[package]] name = "ipywidgets" -version = "8.1.0" +version = "8.1.1" description = "Jupyter interactive widgets" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "ipywidgets-8.1.0-py3-none-any.whl", hash = "sha256:6c8396cc7b8c95dfb4e9ab0054f48c002f045e7e5d7ae523f559d64e525a98ab"}, - {file = "ipywidgets-8.1.0.tar.gz", hash = "sha256:ce97dd90525b3066fd00094690964e7eac14cf9b7745d35565b5eeac20cce687"}, + {file = "ipywidgets-8.1.1-py3-none-any.whl", hash = 
"sha256:2b88d728656aea3bbfd05d32c747cfd0078f9d7e159cf982433b58ad717eed7f"}, + {file = "ipywidgets-8.1.1.tar.gz", hash = "sha256:40211efb556adec6fa450ccc2a77d59ca44a060f4f9f136833df59c9f538e6e8"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.7,<3.1.0" +jupyterlab-widgets = ">=3.0.9,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.7,<4.1.0" +widgetsnbextension = ">=4.0.9,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] @@ -1323,14 +1353,14 @@ files = [ [[package]] name = "jedi" -version = "0.19.0" +version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.19.0-py2.py3-none-any.whl", hash = "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e"}, - {file = "jedi-0.19.0.tar.gz", hash = "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4"}, + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, ] [package.dependencies] @@ -1339,7 +1369,7 @@ parso = ">=0.8.3,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 
(==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" @@ -1388,29 +1418,25 @@ files = [ [[package]] name = "jsonschema" -version = "4.18.4" +version = "4.17.3" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "jsonschema-4.18.4-py3-none-any.whl", hash = "sha256:971be834317c22daaa9132340a51c01b50910724082c2c1a2ac87eeec153a3fe"}, - {file = "jsonschema-4.18.4.tar.gz", hash = "sha256:fb3642735399fa958c0d2aad7057901554596c63349f4f6b283c493cf692a25d"}, + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, ] [package.dependencies] -attrs = ">=22.2.0" +attrs = ">=17.4.0" fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} -jsonschema-specifications = ">=2023.03.6" -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} -referencing = ">=0.28.4" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} -rpds-py = ">=0.7.1" uri-template = {version = "*", optional = 
true, markers = "extra == \"format-nongpl\""} webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} @@ -1418,22 +1444,6 @@ webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-n format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] -[[package]] -name = "jsonschema-specifications" -version = "2023.7.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, -] - -[package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -referencing = ">=0.28.0" - [[package]] name = "jupyter" version = "1.0.0" @@ -1457,18 +1467,17 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "8.3.0" +version = "8.5.0" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, - {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, + {file = "jupyter_client-8.5.0-py3-none-any.whl", hash = "sha256:c3877aac7257ec68d79b5c622ce986bd2a992ca42f6ddc9b4dd1da50e89f7028"}, + {file = "jupyter_client-8.5.0.tar.gz", hash = 
"sha256:e8754066510ce456358df363f97eae64b50860f30dc1fe8c6771440db3be9a63"}, ] [package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" @@ -1506,14 +1515,14 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.3.1" +version = "5.4.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, - {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, + {file = "jupyter_core-5.4.0-py3-none-any.whl", hash = "sha256:66e252f675ac04dcf2feb6ed4afb3cd7f68cf92f483607522dc251f32d471571"}, + {file = "jupyter_core-5.4.0.tar.gz", hash = "sha256:e4b98344bb94ee2e3e6c4519a97d001656009f9cb2b7f2baf15b3c205770011d"}, ] [package.dependencies] @@ -1527,21 +1536,20 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.7.0" +version = "0.6.3" description = "Jupyter Event System library" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "jupyter_events-0.7.0-py3-none-any.whl", hash = "sha256:4753da434c13a37c3f3c89b500afa0c0a6241633441421f6adafe2fb2e2b924e"}, - {file = "jupyter_events-0.7.0.tar.gz", hash = "sha256:7be27f54b8388c03eefea123a4f79247c5b9381c49fb1cd48615ee191eb12615"}, + {file = "jupyter_events-0.6.3-py3-none-any.whl", hash = "sha256:57a2749f87ba387cd1bfd9b22a0875b889237dbf2edc2121ebb22bde47036c17"}, + {file = "jupyter_events-0.6.3.tar.gz", hash = "sha256:9a6e9995f75d1b7146b436ea24d696ce3a35bfa8bfe45e0c33c334c79464d0b3"}, ] [package.dependencies] -jsonschema = {version = ">=4.18.0", extras = 
["format-nongpl"]} +jsonschema = {version = ">=3.2.0", extras = ["format-nongpl"]} python-json-logger = ">=2.0.4" pyyaml = ">=5.3" -referencing = "*" rfc3339-validator = "*" rfc3986-validator = ">=0.1.1" traitlets = ">=5.3" @@ -1549,7 +1557,7 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] -test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] +test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "pytest-cov", "rich"] [[package]] name = "jupyter-lsp" @@ -1564,19 +1572,18 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.7.0" +version = "2.9.1" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.7.0-py3-none-any.whl", hash = "sha256:6a77912aff643e53fa14bdb2634884b52b784a4be77ce8e93f7283faed0f0849"}, - {file = "jupyter_server-2.7.0.tar.gz", hash = "sha256:36da0a266d31a41ac335a366c88933c17dfa5bb817a48f5c02c16d303bc9477f"}, + {file = "jupyter_server-2.9.1-py3-none-any.whl", hash = "sha256:21ad1a3d455d5a79ce4bef5201925cd17510c17898cf9d54e3ccfb6b12734948"}, + {file = "jupyter_server-2.9.1.tar.gz", hash = "sha256:9ba71be4b9c16e479e4c50c929f8ac4b1015baf90237a08681397a98c76c7e5e"}, ] [package.dependencies] @@ -1594,7 +1601,7 @@ packaging = "*" prometheus-client = "*" pywinpty = {version = "*", markers = "os_name == \"nt\""} pyzmq = ">=24" -send2trash = "*" +send2trash = ">=1.8.2" terminado = ">=0.8.3" tornado = ">=6.2.0" traitlets = ">=5.6.0" @@ -1626,20 +1633,18 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", [[package]] name = "jupyterlab" -version = "4.0.3" +version = "4.0.7" description = "JupyterLab computational environment" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.3-py3-none-any.whl", hash = "sha256:d369944391b1d15f2d1f3cb965fb67352956279b2ae6f03ce7947a43940a8301"}, - {file = "jupyterlab-4.0.3.tar.gz", hash = "sha256:e14d1ce46a613028111d0d476a1d7d6b094003b7462bac669f5b478317abcb39"}, + {file = "jupyterlab-4.0.7-py3-none-any.whl", hash = "sha256:08683045117cc495531fdb39c22ababb9aaac6977a45e67cfad20046564c9c7c"}, + {file = "jupyterlab-4.0.7.tar.gz", hash = "sha256:48792efd9f962b2bcda1f87d72168ff122c288b1d97d32109e4a11b33dc862be"}, ] [package.dependencies] async-lru = ">=1.0.0" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -importlib-resources = {version = ">=1.4", markers = "python_version < \"3.9\""} ipykernel = "*" jinja2 = ">=3.0.3" jupyter-core = "*" @@ -1653,8 +1658,8 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = 
["black[jupyter] (==23.3.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.271)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8)", "sphinx-copybutton"] +dev = ["black[jupyter] (==23.7.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.286)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] @@ -1684,7 +1689,6 @@ files = [ [package.dependencies] babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jinja2 = ">=3.0.3" json5 = ">=0.9.0" jsonschema = ">=4.17.3" @@ -1699,14 +1703,14 @@ test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-valida [[package]] name = "jupyterlab-widgets" -version = "3.0.8" +version = "3.0.9" description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.8-py3-none-any.whl", hash = "sha256:4715912d6ceab839c9db35953c764b3214ebbc9161c809f6e0510168845dfdf5"}, - {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, + {file = "jupyterlab_widgets-3.0.9-py3-none-any.whl", hash = 
"sha256:3cf5bdf5b897bf3bccf1c11873aa4afd776d7430200f765e0686bd352487b58d"}, + {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, ] [[package]] @@ -1767,9 +1771,6 @@ files = [ {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, ] -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - [package.extras] testing = ["coverage", "pyyaml"] @@ -1801,16 +1802,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1896,14 +1887,14 @@ files = [ [[package]] name = "mistune" -version = "3.0.1" +version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mistune-3.0.1-py3-none-any.whl", hash = "sha256:b9b3e438efbb57c62b5beb5e134dab664800bdf1284a7ee09e8b12b13eb1aac6"}, - {file = "mistune-3.0.1.tar.gz", hash = "sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, ] [[package]] @@ -1935,38 +1926,39 @@ i18n = ["babel (>=2.9.0)"] [[package]] name = "mypy" -version = "1.4.1" +version = "1.6.1" description = "Optional static typing for Python" category = "dev" optional = 
false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = 
"mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = 
"mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, ] [package.dependencies] @@ -1977,7 +1969,6 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] @@ -2017,21 +2008,20 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = 
"nbconvert" -version = "7.7.3" +version = "7.9.2" description = "Converting Jupyter Notebooks" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.7.3-py3-none-any.whl", hash = "sha256:3022adadff3f86578a47fab7c2228bb3ca9c56a24345642a22f917f6168b48fc"}, - {file = "nbconvert-7.7.3.tar.gz", hash = "sha256:4a5996bf5f3cd16aa0431897ba1aa4c64842c2079f434b3dc6b8c4b252ef3355"}, + {file = "nbconvert-7.9.2-py3-none-any.whl", hash = "sha256:39fe4b8bdd1b0104fdd86fc8a43a9077ba64c720bda4c6132690d917a0a154ee"}, + {file = "nbconvert-7.9.2.tar.gz", hash = "sha256:e56cc7588acc4f93e2bb5a34ec69028e4941797b2bfaf6462f18a41d1cc258c9"}, ] [package.dependencies] beautifulsoup4 = "*" bleach = "!=5.0.0" defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" @@ -2051,7 +2041,7 @@ docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sp qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] +test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pytest", "pytest-dependency"] webpdf = ["playwright"] [[package]] @@ -2078,30 +2068,29 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.7" +version = "1.5.8" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, - {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = 
"nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] [[package]] name = "notebook" -version = "7.0.1" +version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.1-py3-none-any.whl", hash = "sha256:35327476042140e8739ff8fcfecdc915658ae72b4db72d6e3b537badcdbf9e35"}, - {file = "notebook-7.0.1.tar.gz", hash = "sha256:2e16ad4e63ea89f7efbe212ee7c1693fcfa5ab55ffef75047530f74af4bd926c"}, + {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, + {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, ] [package.dependencies] -importlib-resources = {version = ">=5.0", markers = "python_version < \"3.9\""} jupyter-server = ">=2.4.0,<3" jupyterlab = ">=4.0.2,<5" jupyterlab-server = ">=2.22.1,<3" @@ -2111,7 +2100,7 @@ tornado = ">=6.2.0" [package.extras] dev = ["hatch", "pre-commit"] docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] [[package]] name = "notebook-shim" @@ -2131,29 +2120,140 @@ jupyter-server = ">=1.8,<3" [package.extras] test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] +[[package]] +name = "numpy" +version = "1.26.1" +description = "Fundamental package for array computing in Python" 
+category = "main" +optional = false +python-versions = "<3.13,>=3.9" +files = [ + {file = "numpy-1.26.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82e871307a6331b5f09efda3c22e03c095d957f04bf6bc1804f30048d0e5e7af"}, + {file = "numpy-1.26.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdd9ec98f0063d93baeb01aad472a1a0840dee302842a2746a7a8e92968f9575"}, + {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d78f269e0c4fd365fc2992c00353e4530d274ba68f15e968d8bc3c69ce5f5244"}, + {file = "numpy-1.26.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ab9163ca8aeb7fd32fe93866490654d2f7dda4e61bc6297bf72ce07fdc02f67"}, + {file = "numpy-1.26.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:78ca54b2f9daffa5f323f34cdf21e1d9779a54073f0018a3094ab907938331a2"}, + {file = "numpy-1.26.1-cp310-cp310-win32.whl", hash = "sha256:d1cfc92db6af1fd37a7bb58e55c8383b4aa1ba23d012bdbba26b4bcca45ac297"}, + {file = "numpy-1.26.1-cp310-cp310-win_amd64.whl", hash = "sha256:d2984cb6caaf05294b8466966627e80bf6c7afd273279077679cb010acb0e5ab"}, + {file = "numpy-1.26.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cd7837b2b734ca72959a1caf3309457a318c934abef7a43a14bb984e574bbb9a"}, + {file = "numpy-1.26.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c59c046c31a43310ad0199d6299e59f57a289e22f0f36951ced1c9eac3665b9"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d58e8c51a7cf43090d124d5073bc29ab2755822181fcad978b12e144e5e5a4b3"}, + {file = "numpy-1.26.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6081aed64714a18c72b168a9276095ef9155dd7888b9e74b5987808f0dd0a974"}, + {file = "numpy-1.26.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:97e5d6a9f0702c2863aaabf19f0d1b6c2628fbe476438ce0b5ce06e83085064c"}, + {file = "numpy-1.26.1-cp311-cp311-win32.whl", hash = 
"sha256:b9d45d1dbb9de84894cc50efece5b09939752a2d75aab3a8b0cef6f3a35ecd6b"}, + {file = "numpy-1.26.1-cp311-cp311-win_amd64.whl", hash = "sha256:3649d566e2fc067597125428db15d60eb42a4e0897fc48d28cb75dc2e0454e53"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d1bd82d539607951cac963388534da3b7ea0e18b149a53cf883d8f699178c0f"}, + {file = "numpy-1.26.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:afd5ced4e5a96dac6725daeb5242a35494243f2239244fad10a90ce58b071d24"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03fb25610ef560a6201ff06df4f8105292ba56e7cdd196ea350d123fc32e24e"}, + {file = "numpy-1.26.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcfaf015b79d1f9f9c9fd0731a907407dc3e45769262d657d754c3a028586124"}, + {file = "numpy-1.26.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e509cbc488c735b43b5ffea175235cec24bbc57b227ef1acc691725beb230d1c"}, + {file = "numpy-1.26.1-cp312-cp312-win32.whl", hash = "sha256:af22f3d8e228d84d1c0c44c1fbdeb80f97a15a0abe4f080960393a00db733b66"}, + {file = "numpy-1.26.1-cp312-cp312-win_amd64.whl", hash = "sha256:9f42284ebf91bdf32fafac29d29d4c07e5e9d1af862ea73686581773ef9e73a7"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bb894accfd16b867d8643fc2ba6c8617c78ba2828051e9a69511644ce86ce83e"}, + {file = "numpy-1.26.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e44ccb93f30c75dfc0c3aa3ce38f33486a75ec9abadabd4e59f114994a9c4617"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9696aa2e35cc41e398a6d42d147cf326f8f9d81befcb399bc1ed7ffea339b64e"}, + {file = "numpy-1.26.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b411040beead47a228bde3b2241100454a6abde9df139ed087bd73fc0a4908"}, + {file = "numpy-1.26.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:1e11668d6f756ca5ef534b5be8653d16c5352cbb210a5c2a79ff288e937010d5"}, + {file = "numpy-1.26.1-cp39-cp39-win32.whl", hash = "sha256:d1d2c6b7dd618c41e202c59c1413ef9b2c8e8a15f5039e344af64195459e3104"}, + {file = "numpy-1.26.1-cp39-cp39-win_amd64.whl", hash = "sha256:59227c981d43425ca5e5c01094d59eb14e8772ce6975d4b2fc1e106a833d5ae2"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06934e1a22c54636a059215d6da99e23286424f316fddd979f5071093b648668"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76ff661a867d9272cd2a99eed002470f46dbe0943a5ffd140f49be84f68ffc42"}, + {file = "numpy-1.26.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6965888d65d2848e8768824ca8288db0a81263c1efccec881cb35a0d805fcd2f"}, + {file = "numpy-1.26.1.tar.gz", hash = "sha256:c8c6c72d4a9f831f328efb1312642a1cafafaa88981d9ab76368d50d07d93cbe"}, +] + [[package]] name = "overrides" -version = "7.3.1" +version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." 
category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"}, - {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"}, + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, ] [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.1.1" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58d997dbee0d4b64f3cb881a24f918b5f25dd64ddf31f467bb9b67ae4c63a1e4"}, + {file = "pandas-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02304e11582c5d090e5a52aec726f31fe3f42895d6bfc1f28738f9b64b6f0614"}, + {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa8f0966de2c22de408d0e322db2faed6f6e74265aa0856f3824813cf124363"}, + {file = 
"pandas-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f84c144dee086fe4f04a472b5cd51e680f061adf75c1ae4fc3a9275560f8f4"}, + {file = "pandas-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ce97667d06d69396d72be074f0556698c7f662029322027c226fd7a26965cb"}, + {file = "pandas-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:4c3f32fd7c4dccd035f71734df39231ac1a6ff95e8bdab8d891167197b7018d2"}, + {file = "pandas-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e2959720b70e106bb1d8b6eadd8ecd7c8e99ccdbe03ee03260877184bb2877d"}, + {file = "pandas-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25e8474a8eb258e391e30c288eecec565bfed3e026f312b0cbd709a63906b6f8"}, + {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bd1685556f3374520466998929bade3076aeae77c3e67ada5ed2b90b4de7f0"}, + {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3657869c7902810f32bd072f0740487f9e030c1a3ab03e0af093db35a9d14e"}, + {file = "pandas-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:05674536bd477af36aa2effd4ec8f71b92234ce0cc174de34fd21e2ee99adbc2"}, + {file = "pandas-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:b407381258a667df49d58a1b637be33e514b07f9285feb27769cedb3ab3d0b3a"}, + {file = "pandas-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c747793c4e9dcece7bb20156179529898abf505fe32cb40c4052107a3c620b49"}, + {file = "pandas-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bcad1e6fb34b727b016775bea407311f7721db87e5b409e6542f4546a4951ea"}, + {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5ec7740f9ccb90aec64edd71434711f58ee0ea7f5ed4ac48be11cfa9abf7317"}, + {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29deb61de5a8a93bdd033df328441a79fcf8dd3c12d5ed0b41a395eef9cd76f0"}, + {file = 
"pandas-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f99bebf19b7e03cf80a4e770a3e65eee9dd4e2679039f542d7c1ace7b7b1daa"}, + {file = "pandas-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:84e7e910096416adec68075dc87b986ff202920fb8704e6d9c8c9897fe7332d6"}, + {file = "pandas-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366da7b0e540d1b908886d4feb3d951f2f1e572e655c1160f5fde28ad4abb750"}, + {file = "pandas-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e50e72b667415a816ac27dfcfe686dc5a0b02202e06196b943d54c4f9c7693e"}, + {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1ab6a25da197f03ebe6d8fa17273126120874386b4ac11c1d687df288542dd"}, + {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0dbfea0dd3901ad4ce2306575c54348d98499c95be01b8d885a2737fe4d7a98"}, + {file = "pandas-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0489b0e6aa3d907e909aef92975edae89b1ee1654db5eafb9be633b0124abe97"}, + {file = "pandas-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cdb0fab0400c2cb46dafcf1a0fe084c8bb2480a1fa8d81e19d15e12e6d4ded2"}, + {file = "pandas-2.1.1.tar.gz", hash = "sha256:fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl 
(>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] [[package]] name = "pandocfilters" @@ -2234,28 +2334,16 @@ files = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." 
-category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - [[package]] name = "platformdirs" -version = "3.10.0" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] @@ -2264,14 +2352,14 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -2329,26 +2417,28 
@@ wcwidth = "*" [[package]] name = "psutil" -version = "5.9.5" +version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = 
"sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, ] [package.extras] @@ -2356,25 +2446,23 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg2" -version = "2.9.6" +version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "psycopg2-2.9.6-cp310-cp310-win32.whl", hash = "sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1"}, - {file = "psycopg2-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494"}, - {file = "psycopg2-2.9.6-cp311-cp311-win32.whl", hash = "sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad"}, - {file = "psycopg2-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:11aca705ec888e4f4cea97289a0bf0f22a067a32614f6ef64fcf7b8bfbc53744"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:36c941a767341d11549c0fbdbb2bf5be2eda4caf87f65dfcd7d146828bd27f39"}, - 
{file = "psycopg2-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:869776630c04f335d4124f120b7fb377fe44b0a7645ab3c34b4ba42516951889"}, - {file = "psycopg2-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:a8ad4a47f42aa6aec8d061fdae21eaed8d864d4bb0f0cade5ad32ca16fcd6258"}, - {file = "psycopg2-2.9.6-cp38-cp38-win32.whl", hash = "sha256:2362ee4d07ac85ff0ad93e22c693d0f37ff63e28f0615a16b6635a645f4b9214"}, - {file = "psycopg2-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:d24ead3716a7d093b90b27b3d73459fe8cd90fd7065cf43b3c40966221d8c394"}, - {file = "psycopg2-2.9.6-cp39-cp39-win32.whl", hash = "sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f"}, - {file = "psycopg2-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5"}, - {file = "psycopg2-2.9.6.tar.gz", hash = "sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011"}, + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = 
"psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, ] [[package]] @@ -2404,16 +2492,35 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "pycap" +version = "2.4.0" +description = "PyCap: Python interface to REDCap" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pycap-2.4.0-py3-none-any.whl", hash = "sha256:056f432204f20c99b1479dff41ac798da3e2a995f421dd081de330729585a483"}, + {file = "pycap-2.4.0.tar.gz", hash = "sha256:15f84e8b61cd39efbd30449e73738850629581165a2144b15d676ceb7936d49e"}, +] + +[package.dependencies] +requests = ">=2.20,<3.0" +semantic-version = ">=2.8.5,<3.0.0" + +[package.extras] +data-science = ["pandas (>=1.3.4,<2.0.0)"] + [[package]] name = "pycodestyle" -version = "2.11.0" +version = "2.11.1" description = "Python style guide checker" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, - {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] @@ -2483,14 +2590,14 @@ files = [ [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -2516,18 +2623,18 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "2.17.5" +version = "2.17.7" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "pylint-2.17.5-py3-none-any.whl", hash = "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413"}, - {file = "pylint-2.17.5.tar.gz", hash = "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252"}, + {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, + {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, ] [package.dependencies] -astroid = ">=2.15.6,<=2.17.0-dev0" +astroid = ">=2.15.8,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -2538,7 +2645,6 @@ mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] spelling = ["pyenchant (>=3.2,<4.0)"] @@ -2546,30 +2652,75 @@ testutils = ["gitpython (>3)"] [[package]] name = "pymdown-extensions" -version = "10.1" 
+version = "10.3.1" description = "Extension pack for Python Markdown." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.1-py3-none-any.whl", hash = "sha256:ef25dbbae530e8f67575d222b75ff0649b1e841e22c2ae9a20bad9472c2207dc"}, - {file = "pymdown_extensions-10.1.tar.gz", hash = "sha256:508009b211373058debb8247e168de4cbcb91b1bff7b5e961b2c3e864e00b195"}, + {file = "pymdown_extensions-10.3.1-py3-none-any.whl", hash = "sha256:8cba67beb2a1318cdaf742d09dff7c0fc4cafcc290147ade0f8fb7b71522711a"}, + {file = "pymdown_extensions-10.3.1.tar.gz", hash = "sha256:f6c79941498a458852853872e379e7bab63888361ba20992fc8b4f8a9b61735e"}, ] [package.dependencies] markdown = ">=3.2" pyyaml = "*" +[package.extras] +extra = ["pygments (>=2.12)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = 
"pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = 
"pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = 
"sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -2687,14 +2838,14 @@ files = [ [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -2723,17 +2874,18 @@ files = [ [[package]] name = "pywinpty" -version = "2.0.11" +version = "2.0.12" description = "Pseudo terminal support for Windows from Python." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pywinpty-2.0.11-cp310-none-win_amd64.whl", hash = "sha256:452f10ac9ff8ab9151aa8cea9e491a9612a12250b1899278c6a56bc184afb47f"}, - {file = "pywinpty-2.0.11-cp311-none-win_amd64.whl", hash = "sha256:6701867d42aec1239bc0fedf49a336570eb60eb886e81763db77ea2b6c533cc3"}, - {file = "pywinpty-2.0.11-cp38-none-win_amd64.whl", hash = "sha256:0ffd287751ad871141dc9724de70ea21f7fc2ff1af50861e0d232cf70739d8c4"}, - {file = "pywinpty-2.0.11-cp39-none-win_amd64.whl", hash = "sha256:e4e7f023c28ca7aa8e1313e53ba80a4d10171fe27857b7e02f99882dfe3e8638"}, - {file = "pywinpty-2.0.11.tar.gz", hash = "sha256:e244cffe29a894876e2cd251306efd0d8d64abd5ada0a46150a4a71c0b9ad5c5"}, + {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, + {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, + {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, + {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, + {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, + {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, ] [[package]] @@ -2813,89 +2965,105 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "25.1.0" +version = "25.1.1" description = "Python bindings for 0MQ" category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, - {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, - {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, - {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, - {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, - {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = "sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, - {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, - {file = 
"pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, - {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, - {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, - {file = 
"pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, - {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, - {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, - {file = 
"pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, - {file = "pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:381469297409c5adf9a0e884c5eb5186ed33137badcbbb0560b86e910a2f1e76"}, + {file = "pyzmq-25.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:955215ed0604dac5b01907424dfa28b40f2b2292d6493445dd34d0dfa72586a8"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:985bbb1316192b98f32e25e7b9958088431d853ac63aca1d2c236f40afb17c83"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afea96f64efa98df4da6958bae37f1cbea7932c35878b185e5982821bc883369"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76705c9325d72a81155bb6ab48d4312e0032bf045fb0754889133200f7a0d849"}, + {file = "pyzmq-25.1.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:77a41c26205d2353a4c94d02be51d6cbdf63c06fbc1295ea57dad7e2d3381b71"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:12720a53e61c3b99d87262294e2b375c915fea93c31fc2336898c26d7aed34cd"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:57459b68e5cd85b0be8184382cefd91959cafe79ae019e6b1ae6e2ba8a12cda7"}, + {file = "pyzmq-25.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:292fe3fc5ad4a75bc8df0dfaee7d0babe8b1f4ceb596437213821f761b4589f9"}, + {file = "pyzmq-25.1.1-cp310-cp310-win32.whl", hash = "sha256:35b5ab8c28978fbbb86ea54958cd89f5176ce747c1fb3d87356cf698048a7790"}, + {file = "pyzmq-25.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:11baebdd5fc5b475d484195e49bae2dc64b94a5208f7c89954e9e354fc609d8f"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:d20a0ddb3e989e8807d83225a27e5c2eb2260eaa851532086e9e0fa0d5287d83"}, + {file = "pyzmq-25.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e1c1be77bc5fb77d923850f82e55a928f8638f64a61f00ff18a67c7404faf008"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d89528b4943d27029a2818f847c10c2cecc79fa9590f3cb1860459a5be7933eb"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90f26dc6d5f241ba358bef79be9ce06de58d477ca8485e3291675436d3827cf8"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c2b92812bd214018e50b6380ea3ac0c8bb01ac07fcc14c5f86a5bb25e74026e9"}, + {file = "pyzmq-25.1.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f957ce63d13c28730f7fd6b72333814221c84ca2421298f66e5143f81c9f91f"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:047a640f5c9c6ade7b1cc6680a0e28c9dd5a0825135acbd3569cc96ea00b2505"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7f7e58effd14b641c5e4dec8c7dab02fb67a13df90329e61c869b9cc607ef752"}, + {file = "pyzmq-25.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c2910967e6ab16bf6fbeb1f771c89a7050947221ae12a5b0b60f3bca2ee19bca"}, + {file = "pyzmq-25.1.1-cp311-cp311-win32.whl", hash = "sha256:76c1c8efb3ca3a1818b837aea423ff8a07bbf7aafe9f2f6582b61a0458b1a329"}, + {file = "pyzmq-25.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:44e58a0554b21fc662f2712814a746635ed668d0fbc98b7cb9d74cb798d202e6"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:e1ffa1c924e8c72778b9ccd386a7067cddf626884fd8277f503c48bb5f51c762"}, + {file = "pyzmq-25.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1af379b33ef33757224da93e9da62e6471cf4a66d10078cf32bae8127d3d0d4a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cff084c6933680d1f8b2f3b4ff5bbb88538a4aac00d199ac13f49d0698727ecb"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2400a94f7dd9cb20cd012951a0cbf8249e3d554c63a9c0cdfd5cbb6c01d2dec"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d81f1ddae3858b8299d1da72dd7d19dd36aab654c19671aa8a7e7fb02f6638a"}, + {file = "pyzmq-25.1.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:255ca2b219f9e5a3a9ef3081512e1358bd4760ce77828e1028b818ff5610b87b"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:a882ac0a351288dd18ecae3326b8a49d10c61a68b01419f3a0b9a306190baf69"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:724c292bb26365659fc434e9567b3f1adbdb5e8d640c936ed901f49e03e5d32e"}, + {file = "pyzmq-25.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ca1ed0bb2d850aa8471387882247c68f1e62a4af0ce9c8a1dbe0d2bf69e41fb"}, + {file = "pyzmq-25.1.1-cp312-cp312-win32.whl", hash = "sha256:b3451108ab861040754fa5208bca4a5496c65875710f76789a9ad27c801a0075"}, + {file = "pyzmq-25.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:eadbefd5e92ef8a345f0525b5cfd01cf4e4cc651a2cffb8f23c0dd184975d787"}, + {file = "pyzmq-25.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db0b2af416ba735c6304c47f75d348f498b92952f5e3e8bff449336d2728795d"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c133e93b405eb0d36fa430c94185bdd13c36204a8635470cccc200723c13bb"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:273bc3959bcbff3f48606b28229b4721716598d76b5aaea2b4a9d0ab454ec062"}, + {file = "pyzmq-25.1.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cbc8df5c6a88ba5ae385d8930da02201165408dde8d8322072e3e5ddd4f68e22"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:18d43df3f2302d836f2a56f17e5663e398416e9dd74b205b179065e61f1a6edf"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:73461eed88a88c866656e08f89299720a38cb4e9d34ae6bf5df6f71102570f2e"}, + {file = "pyzmq-25.1.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c850ce7976d19ebe7b9d4b9bb8c9dfc7aac336c0958e2651b88cbd46682123"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win32.whl", hash = "sha256:d2045d6d9439a0078f2a34b57c7b18c4a6aef0bee37f22e4ec9f32456c852c71"}, + {file = "pyzmq-25.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:458dea649f2f02a0b244ae6aef8dc29325a2810aa26b07af8374dc2a9faf57e3"}, + {file = 
"pyzmq-25.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7cff25c5b315e63b07a36f0c2bab32c58eafbe57d0dce61b614ef4c76058c115"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1579413ae492b05de5a6174574f8c44c2b9b122a42015c5292afa4be2507f28"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d0a409d3b28607cc427aa5c30a6f1e4452cc44e311f843e05edb28ab5e36da0"}, + {file = "pyzmq-25.1.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21eb4e609a154a57c520e3d5bfa0d97e49b6872ea057b7c85257b11e78068222"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:034239843541ef7a1aee0c7b2cb7f6aafffb005ede965ae9cbd49d5ff4ff73cf"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f8115e303280ba09f3898194791a153862cbf9eef722ad8f7f741987ee2a97c7"}, + {file = "pyzmq-25.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1a5d26fe8f32f137e784f768143728438877d69a586ddeaad898558dc971a5ae"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win32.whl", hash = "sha256:f32260e556a983bc5c7ed588d04c942c9a8f9c2e99213fec11a031e316874c7e"}, + {file = "pyzmq-25.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:abf34e43c531bbb510ae7e8f5b2b1f2a8ab93219510e2b287a944432fad135f3"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:87e34f31ca8f168c56d6fbf99692cc8d3b445abb5bfd08c229ae992d7547a92a"}, + {file = "pyzmq-25.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c9c6c9b2c2f80747a98f34ef491c4d7b1a8d4853937bb1492774992a120f475d"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5619f3f5a4db5dbb572b095ea3cb5cc035335159d9da950830c9c4db2fbb6995"}, + {file = "pyzmq-25.1.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5a34d2395073ef862b4032343cf0c32a712f3ab49d7ec4f42c9661e0294d106f"}, + {file = 
"pyzmq-25.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0e6b78220aba09815cd1f3a32b9c7cb3e02cb846d1cfc526b6595f6046618"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3669cf8ee3520c2f13b2e0351c41fea919852b220988d2049249db10046a7afb"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2d163a18819277e49911f7461567bda923461c50b19d169a062536fffe7cd9d2"}, + {file = "pyzmq-25.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:df27ffddff4190667d40de7beba4a950b5ce78fe28a7dcc41d6f8a700a80a3c0"}, + {file = "pyzmq-25.1.1-cp38-cp38-win32.whl", hash = "sha256:a382372898a07479bd34bda781008e4a954ed8750f17891e794521c3e21c2e1c"}, + {file = "pyzmq-25.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:52533489f28d62eb1258a965f2aba28a82aa747202c8fa5a1c7a43b5db0e85c1"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:03b3f49b57264909aacd0741892f2aecf2f51fb053e7d8ac6767f6c700832f45"}, + {file = "pyzmq-25.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:330f9e188d0d89080cde66dc7470f57d1926ff2fb5576227f14d5be7ab30b9fa"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2ca57a5be0389f2a65e6d3bb2962a971688cbdd30b4c0bd188c99e39c234f414"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d457aed310f2670f59cc5b57dcfced452aeeed77f9da2b9763616bd57e4dbaae"}, + {file = "pyzmq-25.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c56d748ea50215abef7030c72b60dd723ed5b5c7e65e7bc2504e77843631c1a6"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f03d3f0d01cb5a018debeb412441996a517b11c5c17ab2001aa0597c6d6882c"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:820c4a08195a681252f46926de10e29b6bbf3e17b30037bd4250d72dd3ddaab8"}, + {file = "pyzmq-25.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:17ef5f01d25b67ca8f98120d5fa1d21efe9611604e8eb03a5147360f517dd1e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win32.whl", hash = "sha256:04ccbed567171579ec2cebb9c8a3e30801723c575601f9a990ab25bcac6b51e2"}, + {file = "pyzmq-25.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:e61f091c3ba0c3578411ef505992d356a812fb200643eab27f4f70eed34a29ef"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ade6d25bb29c4555d718ac6d1443a7386595528c33d6b133b258f65f963bb0f6"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0c95ddd4f6e9fca4e9e3afaa4f9df8552f0ba5d1004e89ef0a68e1f1f9807c7"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48e466162a24daf86f6b5ca72444d2bf39a5e58da5f96370078be67c67adc978"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc719161780932c4e11aaebb203be3d6acc6b38d2f26c0f523b5b59d2fc1996"}, + {file = "pyzmq-25.1.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ccf825981640b8c34ae54231b7ed00271822ea1c6d8ba1090ebd4943759abf5"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c2f20ce161ebdb0091a10c9ca0372e023ce24980d0e1f810f519da6f79c60800"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:deee9ca4727f53464daf089536e68b13e6104e84a37820a88b0a057b97bba2d2"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:aa8d6cdc8b8aa19ceb319aaa2b660cdaccc533ec477eeb1309e2a291eaacc43a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:019e59ef5c5256a2c7378f2fb8560fc2a9ff1d315755204295b2eab96b254d0a"}, + {file = "pyzmq-25.1.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b9af3757495c1ee3b5c4e945c1df7be95562277c6e5bccc20a39aec50f826cd0"}, + {file = 
"pyzmq-25.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:548d6482dc8aadbe7e79d1b5806585c8120bafa1ef841167bc9090522b610fa6"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:057e824b2aae50accc0f9a0570998adc021b372478a921506fddd6c02e60308e"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2243700cc5548cff20963f0ca92d3e5e436394375ab8a354bbea2b12911b20b0"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79986f3b4af059777111409ee517da24a529bdbd46da578b33f25580adcff728"}, + {file = "pyzmq-25.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:11d58723d44d6ed4dd677c5615b2ffb19d5c426636345567d6af82be4dff8a55"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:49d238cf4b69652257db66d0c623cd3e09b5d2e9576b56bc067a396133a00d4a"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fedbdc753827cf014c01dbbee9c3be17e5a208dcd1bf8641ce2cd29580d1f0d4"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc16ac425cc927d0a57d242589f87ee093884ea4804c05a13834d07c20db203c"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c1d2aed9079c6b0c9550a7257a836b4a637feb334904610f06d70eb44c56d2"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e8a701123029cc240cea61dd2d16ad57cab4691804143ce80ecd9286b464d180"}, + {file = "pyzmq-25.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61706a6b6c24bdece85ff177fec393545a3191eeda35b07aaa1458a027ad1304"}, + {file = "pyzmq-25.1.1.tar.gz", hash = "sha256:259c22485b71abacdfa8bf79720cd7bcf4b9d128b30ea554f01ae71fdbfdaa23"}, ] [package.dependencies] @@ -2903,14 +3071,14 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] 
name = "qtconsole" -version = "5.4.3" +version = "5.4.4" description = "Jupyter Qt console" category = "dev" optional = false python-versions = ">= 3.7" files = [ - {file = "qtconsole-5.4.3-py3-none-any.whl", hash = "sha256:35fd6e87b1f6d1fd41801b07e69339f8982e76afd4fa8ef35595bc6036717189"}, - {file = "qtconsole-5.4.3.tar.gz", hash = "sha256:5e4082a86a201796b2a5cfd4298352d22b158b51b57736531824715fc2a979dd"}, + {file = "qtconsole-5.4.4-py3-none-any.whl", hash = "sha256:a3b69b868e041c2c698bdc75b0602f42e130ffb256d6efa48f9aa756c97672aa"}, + {file = "qtconsole-5.4.4.tar.gz", hash = "sha256:b7ffb53d74f23cee29f4cdb55dd6fabc8ec312d94f3c46ba38e1dde458693dfb"}, ] [package.dependencies] @@ -2921,7 +3089,7 @@ jupyter-core = "*" packaging = "*" pygments = "*" pyzmq = ">=17.1" -qtpy = ">=2.0.1" +qtpy = ">=2.4.0" traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" [package.extras] @@ -2930,14 +3098,14 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "2.3.1" +version = "2.4.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "QtPy-2.3.1-py3-none-any.whl", hash = "sha256:5193d20e0b16e4d9d3bc2c642d04d9f4e2c892590bd1b9c92bfe38a95d5a2e12"}, - {file = "QtPy-2.3.1.tar.gz", hash = "sha256:a8c74982d6d172ce124d80cafd39653df78989683f760f2281ba91a6e7b9de8b"}, + {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, + {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, ] [package.dependencies] @@ -2947,20 +3115,23 @@ packaging = "*" test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] -name = "referencing" -version = "0.30.0" -description = "JSON Referencing + Python" +name = "redis" +version = "5.0.1" +description = "Python client for Redis database and key-value store" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "referencing-0.30.0-py3-none-any.whl", hash = "sha256:c257b08a399b6c2f5a3510a50d28ab5dbc7bbde049bcaf954d43c446f83ab548"}, - {file = "referencing-0.30.0.tar.gz", hash = "sha256:47237742e990457f7512c7d27486394a9aadaf876cbfaa4be65b27b4f4d47c6b"}, + {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, + {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, ] [package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] [[package]] name = "requests" @@ -3012,112 +3183,21 @@ files = [ ] [[package]] -name = "rpds-py" -version = "0.9.2" -description = "Python bindings to Rust's persistent data structures (rpds)" +name = "semantic-version" +version 
= "2.10.0" +description = "A library implementing the 'SemVer' scheme." category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7" files = [ - {file = "rpds_py-0.9.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ab6919a09c055c9b092798ce18c6c4adf49d24d4d9e43a92b257e3f2548231e7"}, - {file = "rpds_py-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d55777a80f78dd09410bd84ff8c95ee05519f41113b2df90a69622f5540c4f8b"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a216b26e5af0a8e265d4efd65d3bcec5fba6b26909014effe20cd302fd1138fa"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29cd8bfb2d716366a035913ced99188a79b623a3512292963d84d3e06e63b496"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44659b1f326214950a8204a248ca6199535e73a694be8d3e0e869f820767f12f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:745f5a43fdd7d6d25a53ab1a99979e7f8ea419dfefebcab0a5a1e9095490ee5e"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a987578ac5214f18b99d1f2a3851cba5b09f4a689818a106c23dbad0dfeb760f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf4151acb541b6e895354f6ff9ac06995ad9e4175cbc6d30aaed08856558201f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03421628f0dc10a4119d714a17f646e2837126a25ac7a256bdf7c3943400f67f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13b602dc3e8dff3063734f02dcf05111e887f301fdda74151a93dbbc249930fe"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fae5cb554b604b3f9e2c608241b5d8d303e410d7dfb6d397c335f983495ce7f6"}, - {file = "rpds_py-0.9.2-cp310-none-win32.whl", hash = 
"sha256:47c5f58a8e0c2c920cc7783113df2fc4ff12bf3a411d985012f145e9242a2764"}, - {file = "rpds_py-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:4ea6b73c22d8182dff91155af018b11aac9ff7eca085750455c5990cb1cfae6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e564d2238512c5ef5e9d79338ab77f1cbbda6c2d541ad41b2af445fb200385e3"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f411330a6376fb50e5b7a3e66894e4a39e60ca2e17dce258d53768fea06a37bd"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e7521f5af0233e89939ad626b15278c71b69dc1dfccaa7b97bd4cdf96536bb7"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3335c03100a073883857e91db9f2e0ef8a1cf42dc0369cbb9151c149dbbc1b"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d25b1c1096ef0447355f7293fbe9ad740f7c47ae032c2884113f8e87660d8f6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a5d3fbd02efd9cf6a8ffc2f17b53a33542f6b154e88dd7b42ef4a4c0700fdad"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5934e2833afeaf36bd1eadb57256239785f5af0220ed8d21c2896ec4d3a765f"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:095b460e117685867d45548fbd8598a8d9999227e9061ee7f012d9d264e6048d"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91378d9f4151adc223d584489591dbb79f78814c0734a7c3bfa9c9e09978121c"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:24a81c177379300220e907e9b864107614b144f6c2a15ed5c3450e19cf536fae"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:de0b6eceb46141984671802d412568d22c6bacc9b230174f9e55fc72ef4f57de"}, - {file = "rpds_py-0.9.2-cp311-none-win32.whl", 
hash = "sha256:700375326ed641f3d9d32060a91513ad668bcb7e2cffb18415c399acb25de2ab"}, - {file = "rpds_py-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:0766babfcf941db8607bdaf82569ec38107dbb03c7f0b72604a0b346b6eb3298"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1440c291db3f98a914e1afd9d6541e8fc60b4c3aab1a9008d03da4651e67386"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f2996fbac8e0b77fd67102becb9229986396e051f33dbceada3debaacc7033f"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f30d205755566a25f2ae0382944fcae2f350500ae4df4e795efa9e850821d82"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:159fba751a1e6b1c69244e23ba6c28f879a8758a3e992ed056d86d74a194a0f3"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1f044792e1adcea82468a72310c66a7f08728d72a244730d14880cd1dabe36b"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9251eb8aa82e6cf88510530b29eef4fac825a2b709baf5b94a6094894f252387"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01899794b654e616c8625b194ddd1e5b51ef5b60ed61baa7a2d9c2ad7b2a4238"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0c43f8ae8f6be1d605b0465671124aa8d6a0e40f1fb81dcea28b7e3d87ca1e1"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207f57c402d1f8712618f737356e4b6f35253b6d20a324d9a47cb9f38ee43a6b"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b52e7c5ae35b00566d244ffefba0f46bb6bec749a50412acf42b1c3f402e2c90"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:978fa96dbb005d599ec4fd9ed301b1cc45f1a8f7982d4793faf20b404b56677d"}, - {file = 
"rpds_py-0.9.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6aa8326a4a608e1c28da191edd7c924dff445251b94653988efb059b16577a4d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aad51239bee6bff6823bbbdc8ad85136c6125542bbc609e035ab98ca1e32a192"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4dc3602370679c2dfb818d9c97b1137d4dd412230cfecd3c66a1bf388a196"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd9da77c6ec1f258387957b754f0df60766ac23ed698b61941ba9acccd3284d1"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:190ca6f55042ea4649ed19c9093a9be9d63cd8a97880106747d7147f88a49d18"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:876bf9ed62323bc7dcfc261dbc5572c996ef26fe6406b0ff985cbcf460fc8a4c"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa2818759aba55df50592ecbc95ebcdc99917fa7b55cc6796235b04193eb3c55"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ea4d00850ef1e917815e59b078ecb338f6a8efda23369677c54a5825dbebb55"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5855c85eb8b8a968a74dc7fb014c9166a05e7e7a8377fb91d78512900aadd13d"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:14c408e9d1a80dcb45c05a5149e5961aadb912fff42ca1dd9b68c0044904eb32"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:65a0583c43d9f22cb2130c7b110e695fff834fd5e832a776a107197e59a1898e"}, - {file = "rpds_py-0.9.2-cp38-none-win32.whl", hash = "sha256:71f2f7715935a61fa3e4ae91d91b67e571aeb5cb5d10331ab681256bda2ad920"}, - {file = "rpds_py-0.9.2-cp38-none-win_amd64.whl", hash = "sha256:674c704605092e3ebbbd13687b09c9f78c362a4bc710343efe37a91457123044"}, - {file = 
"rpds_py-0.9.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:07e2c54bef6838fa44c48dfbc8234e8e2466d851124b551fc4e07a1cfeb37260"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fdf55283ad38c33e35e2855565361f4bf0abd02470b8ab28d499c663bc5d7c"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:890ba852c16ace6ed9f90e8670f2c1c178d96510a21b06d2fa12d8783a905193"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50025635ba8b629a86d9d5474e650da304cb46bbb4d18690532dd79341467846"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517cbf6e67ae3623c5127206489d69eb2bdb27239a3c3cc559350ef52a3bbf0b"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0836d71ca19071090d524739420a61580f3f894618d10b666cf3d9a1688355b1"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c439fd54b2b9053717cca3de9583be6584b384d88d045f97d409f0ca867d80f"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f68996a3b3dc9335037f82754f9cdbe3a95db42bde571d8c3be26cc6245f2324"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7d68dc8acded354c972116f59b5eb2e5864432948e098c19fe6994926d8e15c3"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f963c6b1218b96db85fc37a9f0851eaf8b9040aa46dec112611697a7023da535"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a46859d7f947061b4010e554ccd1791467d1b1759f2dc2ec9055fa239f1bc26"}, - {file = "rpds_py-0.9.2-cp39-none-win32.whl", hash = "sha256:e07e5dbf8a83c66783a9fe2d4566968ea8c161199680e8ad38d53e075df5f0d0"}, - {file = "rpds_py-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:682726178138ea45a0766907957b60f3a1bf3acdf212436be9733f28b6c5af3c"}, - {file = 
"rpds_py-0.9.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:196cb208825a8b9c8fc360dc0f87993b8b260038615230242bf18ec84447c08d"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c7671d45530fcb6d5e22fd40c97e1e1e01965fc298cbda523bb640f3d923b387"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b32f0940adec65099f3b1c215ef7f1d025d13ff947975a055989cb7fd019a4"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f67da97f5b9eac838b6980fc6da268622e91f8960e083a34533ca710bec8611"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03975db5f103997904c37e804e5f340c8fdabbb5883f26ee50a255d664eed58c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:987b06d1cdb28f88a42e4fb8a87f094e43f3c435ed8e486533aea0bf2e53d931"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c861a7e4aef15ff91233751619ce3a3d2b9e5877e0fcd76f9ea4f6847183aa16"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02938432352359805b6da099c9c95c8a0547fe4b274ce8f1a91677401bb9a45f"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef1f08f2a924837e112cba2953e15aacfccbbfcd773b4b9b4723f8f2ddded08e"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:35da5cc5cb37c04c4ee03128ad59b8c3941a1e5cd398d78c37f716f32a9b7f67"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:141acb9d4ccc04e704e5992d35472f78c35af047fa0cfae2923835d153f091be"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79f594919d2c1a0cc17d1988a6adaf9a2f000d2e1048f71f298b056b1018e872"}, - {file = 
"rpds_py-0.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a06418fe1155e72e16dddc68bb3780ae44cebb2912fbd8bb6ff9161de56e1798"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2eb034c94b0b96d5eddb290b7b5198460e2d5d0c421751713953a9c4e47d10"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b08605d248b974eb02f40bdcd1a35d3924c83a2a5e8f5d0fa5af852c4d960af"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0805911caedfe2736935250be5008b261f10a729a303f676d3d5fea6900c96a"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab2299e3f92aa5417d5e16bb45bb4586171c1327568f638e8453c9f8d9e0f020"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c8d7594e38cf98d8a7df25b440f684b510cf4627fe038c297a87496d10a174f"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b9ec12ad5f0a4625db34db7e0005be2632c1013b253a4a60e8302ad4d462afd"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1fcdee18fea97238ed17ab6478c66b2095e4ae7177e35fb71fbe561a27adf620"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:933a7d5cd4b84f959aedeb84f2030f0a01d63ae6cf256629af3081cf3e3426e8"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:686ba516e02db6d6f8c279d1641f7067ebb5dc58b1d0536c4aaebb7bf01cdc5d"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0173c0444bec0a3d7d848eaeca2d8bd32a1b43f3d3fde6617aac3731fa4be05f"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d576c3ef8c7b2d560e301eb33891d1944d965a4d7a2eacb6332eee8a71827db6"}, - {file = 
"rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed89861ee8c8c47d6beb742a602f912b1bb64f598b1e2f3d758948721d44d468"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1054a08e818f8e18910f1bee731583fe8f899b0a0a5044c6e680ceea34f93876"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99e7c4bb27ff1aab90dcc3e9d37ee5af0231ed98d99cb6f5250de28889a3d502"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c545d9d14d47be716495076b659db179206e3fd997769bc01e2d550eeb685596"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9039a11bca3c41be5a58282ed81ae422fa680409022b996032a43badef2a3752"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb39aca7a64ad0c9490adfa719dbeeb87d13be137ca189d2564e596f8ba32c07"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2d8b3b3a2ce0eaa00c5bbbb60b6713e94e7e0becab7b3db6c5c77f979e8ed1f1"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:99b1c16f732b3a9971406fbfe18468592c5a3529585a45a35adbc1389a529a03"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c27ee01a6c3223025f4badd533bea5e87c988cb0ba2811b690395dfe16088cfe"}, - {file = "rpds_py-0.9.2.tar.gz", hash = "sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"}, + {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, + {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] +[package.extras] +dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", 
"readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"] +doc = ["Sphinx", "sphinx-rtd-theme"] + [[package]] name = "send2trash" version = "1.8.2" @@ -3135,6 +3215,23 @@ nativelib = ["pyobjc-framework-Cocoa", "pywin32"] objc = ["pyobjc-framework-Cocoa"] win32 = ["pywin32"] +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -3173,65 +3270,73 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] name = "sqlalchemy" -version = "2.0.19" +version = "2.0.22" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9deaae357edc2091a9ed5d25e9ee8bba98bcfae454b3911adeaf159c2e9ca9e3"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0bf0fd65b50a330261ec7fe3d091dfc1c577483c96a9fa1e4323e932961aa1b5"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d90ccc15ba1baa345796a8fb1965223ca7ded2d235ccbef80a47b85cea2d71a"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4e688f6784427e5f9479d1a13617f573de8f7d4aa713ba82813bcd16e259d1"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:584f66e5e1979a7a00f4935015840be627e31ca29ad13f49a6e51e97a3fb8cae"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c69ce70047b801d2aba3e5ff3cba32014558966109fecab0c39d16c18510f15"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-win32.whl", hash = "sha256:96f0463573469579d32ad0c91929548d78314ef95c210a8115346271beeeaaa2"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-win_amd64.whl", hash = 
"sha256:22bafb1da60c24514c141a7ff852b52f9f573fb933b1e6b5263f0daa28ce6db9"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6894708eeb81f6d8193e996257223b6bb4041cb05a17cd5cf373ed836ef87a2"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8f2afd1aafded7362b397581772c670f20ea84d0a780b93a1a1529da7c3d369"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15afbf5aa76f2241184c1d3b61af1a72ba31ce4161013d7cb5c4c2fca04fd6e"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc05b59142445a4efb9c1fd75c334b431d35c304b0e33f4fa0ff1ea4890f92e"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5831138f0cc06b43edf5f99541c64adf0ab0d41f9a4471fd63b54ae18399e4de"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3afa8a21a9046917b3a12ffe016ba7ebe7a55a6fc0c7d950beb303c735c3c3ad"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-win32.whl", hash = "sha256:c896d4e6ab2eba2afa1d56be3d0b936c56d4666e789bfc59d6ae76e9fcf46145"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-win_amd64.whl", hash = "sha256:024d2f67fb3ec697555e48caeb7147cfe2c08065a4f1a52d93c3d44fc8e6ad1c"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:89bc2b374ebee1a02fd2eae6fd0570b5ad897ee514e0f84c5c137c942772aa0c"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd4d410a76c3762511ae075d50f379ae09551d92525aa5bb307f8343bf7c2c12"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f469f15068cd8351826df4080ffe4cc6377c5bf7d29b5a07b0e717dddb4c7ea2"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cda283700c984e699e8ef0fcc5c61f00c9d14b6f65a4f2767c97242513fcdd84"}, - {file = 
"SQLAlchemy-2.0.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:43699eb3f80920cc39a380c159ae21c8a8924fe071bccb68fc509e099420b148"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-win32.whl", hash = "sha256:61ada5831db36d897e28eb95f0f81814525e0d7927fb51145526c4e63174920b"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-win_amd64.whl", hash = "sha256:57d100a421d9ab4874f51285c059003292433c648df6abe6c9c904e5bd5b0828"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:16a310f5bc75a5b2ce7cb656d0e76eb13440b8354f927ff15cbaddd2523ee2d1"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf7b5e3856cbf1876da4e9d9715546fa26b6e0ba1a682d5ed2fc3ca4c7c3ec5b"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e7b69d9ced4b53310a87117824b23c509c6fc1f692aa7272d47561347e133b6"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9eb4575bfa5afc4b066528302bf12083da3175f71b64a43a7c0badda2be365"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6b54d1ad7a162857bb7c8ef689049c7cd9eae2f38864fc096d62ae10bc100c7d"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5d6afc41ca0ecf373366fd8e10aee2797128d3ae45eb8467b19da4899bcd1ee0"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-win32.whl", hash = "sha256:430614f18443b58ceb9dedec323ecddc0abb2b34e79d03503b5a7579cd73a531"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-win_amd64.whl", hash = "sha256:eb60699de43ba1a1f77363f563bb2c652f7748127ba3a774f7cf2c7804aa0d3d"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a752b7a9aceb0ba173955d4f780c64ee15a1a991f1c52d307d6215c6c73b3a4c"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7351c05db355da112e056a7b731253cbeffab9dfdb3be1e895368513c7d70106"}, - {file = 
"SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa51ce4aea583b0c6b426f4b0563d3535c1c75986c4373a0987d84d22376585b"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae7473a67cd82a41decfea58c0eac581209a0aa30f8bc9190926fbf628bb17f7"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851a37898a8a39783aab603c7348eb5b20d83c76a14766a43f56e6ad422d1ec8"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539010665c90e60c4a1650afe4ab49ca100c74e6aef882466f1de6471d414be7"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-win32.whl", hash = "sha256:f82c310ddf97b04e1392c33cf9a70909e0ae10a7e2ddc1d64495e3abdc5d19fb"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-win_amd64.whl", hash = "sha256:8e712cfd2e07b801bc6b60fdf64853bc2bd0af33ca8fa46166a23fe11ce0dbb0"}, - {file = "SQLAlchemy-2.0.19-py3-none-any.whl", hash = "sha256:314145c1389b021a9ad5aa3a18bac6f5d939f9087d7fc5443be28cba19d2c972"}, - {file = "SQLAlchemy-2.0.19.tar.gz", hash = "sha256:77a14fa20264af73ddcdb1e2b9c5a829b8cc6b8304d0f093271980e36c200a3f"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"}, + {file = 
"SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"}, + {file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"}, + {file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"}, + {file = 
"SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"}, + {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"}, + {file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"}, + {file = 
"SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"}, + {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"}, + {file = 
"SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"}, + {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, + {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"}, + {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"}, ] [package.dependencies] @@ -3239,7 +3344,7 @@ greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or typing-extensions = ">=4.2.0" [package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] @@ -3264,14 +3369,14 @@ sqlcipher = ["sqlcipher3-binary"] [[package]] name = "stack-data" -version = "0.6.2" +version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" category = "dev" optional = false python-versions = "*" files = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, ] [package.dependencies] @@ -3348,51 +3453,63 @@ files = [ [[package]] name = 
"tornado" -version = "6.3.2" +version = "6.3.3" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." category = "dev" optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"}, - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05615096845cf50a895026f749195bf0b10b8909f9be672f50b0fe69cba368e4"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b17b1cf5f8354efa3d37c6e28fdfd9c1c1e5122f2cb56dac121ac61baa47cbe"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:29e71c847a35f6e10ca3b5c2990a52ce38b233019d8e858b755ea6ce4dcdd19d"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:834ae7540ad3a83199a8da8f9f2d383e3c3d5130a328889e4cc991acc81e87a0"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6a0848f1aea0d196a7c4f6772197cbe2abc4266f836b0aac76947872cd29b411"}, - {file = "tornado-6.3.2-cp38-abi3-win32.whl", hash = "sha256:7efcbcc30b7c654eb6a8c9c9da787a851c18f8ccd4a5a3a95b05c7accfa068d2"}, - {file = "tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = "sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"}, - {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, + {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, + {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, + {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, + {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, + {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, + {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, ] [[package]] name = "traitlets" -version = "5.9.0" +version = "5.12.0" description = "Traitlets Python configuration system" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", 
hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, + {file = "traitlets-5.12.0-py3-none-any.whl", hash = "sha256:81539f07f7aebcde2e4b5ab76727f53eabf18ad155c6ed7979a681411602fa47"}, + {file = "traitlets-5.12.0.tar.gz", hash = "sha256:833273bf645d8ce31dcb613c56999e2e055b1ffe6d09168a164bcd91c36d5d35"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.6.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, + {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, +] [[package]] name = "types-requests" -version = "2.31.0.2" +version = "2.31.0.6" description = "Typing stubs for requests" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, - {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, + {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, + {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, ] [package.dependencies] @@ -3412,14 +3529,26 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for 
Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] [[package]] @@ -3439,18 +3568,18 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "1.26.16" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] @@ -3513,14 +3642,14 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.6" +version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" category = "dev" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] [[package]] @@ -3553,31 +3682,31 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.1" +version = "1.6.4" description = "WebSocket client for Python with low level API options" category = "dev" optional = false -python-versions = 
">=3.7" +python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, + {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, + {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, ] [package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] name = "werkzeug" -version = "2.3.6" +version = "2.3.7" description = "The comprehensive WSGI web application library." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Werkzeug-2.3.6-py3-none-any.whl", hash = "sha256:935539fa1413afbb9195b24880778422ed620c0fc09670945185cce4d91a8890"}, - {file = "Werkzeug-2.3.6.tar.gz", hash = "sha256:98c774df2f91b05550078891dee5f0eb0cb797a522c757a2452b9cee5b202330"}, + {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"}, + {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"}, ] [package.dependencies] @@ -3588,14 +3717,14 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "widgetsnbextension" -version = "4.0.8" +version = "4.0.9" description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.8-py3-none-any.whl", hash = "sha256:2e37f0ce9da11651056280c7efe96f2db052fe8fc269508e3724f5cbd6c93018"}, - {file = "widgetsnbextension-4.0.8.tar.gz", hash = 
"sha256:9ec291ba87c2dfad42c3d5b6f68713fa18be1acd7476569516b2431682315c17"}, + {file = "widgetsnbextension-4.0.9-py3-none-any.whl", hash = "sha256:91452ca8445beb805792f206e560c1769284267a30ceb1cec9f5bcc887d15175"}, + {file = "widgetsnbextension-4.0.9.tar.gz", hash = "sha256:3c1f5e46dc1166dfd40a42d685e6a51396fd34ff878742a3e47c6f0cc4a2a385"}, ] [[package]] @@ -3685,21 +3814,21 @@ files = [ [[package]] name = "zipp" -version = "3.16.2" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" +category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" -python-versions = "^3.8.16" -content-hash = "dd13076e47cb3d15d794c986e0cb6d54a72f36accdf6a5441557da63171cfbb1" +python-versions = ">=3.10.12,<3.13" +content-hash = "fb9ccdfdb1d36a84cc2af2f9540de70bb14027c6d3e6b628c2c123092543413c" diff --git a/pyproject.toml b/pyproject.toml index ca68dfba..a9ab93ac 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ classifiers = [ [tool.poetry.dependencies] -python = "^3.8.16" +python = ">=3.10.12,<3.13" Flask = "^2.3.2" Flask-Cors = "^4.0.0" @@ -53,6 +53,11 @@ python-dotenv = "^1.0.0" flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" email-validator = "^2.0.0.post2" +flask-caching = "^2.1.0" +pycap = "^2.4.0" +redis = "^5.0.1" +numpy = "^1.26.1" +pandas = "^2.1.1" [tool.poetry.group.dev.dependencies] From 08ea6d3b1b08d2ef13a391ea761b5928f1815ce2 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Thu, 26 Oct 2023 15:56:37 -0700 Subject: [PATCH 317/505] merge: authentication into staging (#17) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added token field to invited contributors * style: 🎨 fix code style issues with Black * fix: signup token * style: 🎨 fix code style issues with Black * fix: signup token * style: 🎨 fix code style issues with Black * fix: signup token * style: 🎨 fix code style issues with Black * style: errors * 🐛 fix: update env read * fix: authentication errors * fix: poetry toml --------- Co-authored-by: Lint Action Co-authored-by: Sanjay Soundarajan --- apis/authentication.py | 14 +- apis/contributor.py | 10 -- apis/dataset.py | 43 +++-- config.py | 4 + model/invited_study_contributor.py | 5 +- poetry.lock | 262 ++++++++--------------------- tests/conftest.py | 1 + 7 files changed, 118 insertions(+), 221 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index ebf36b8d..e48179bf 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -23,6 +23,7 @@ { "email_address": fields.String(required=True, default="sample@gmail.com"), "password": fields.String(required=True, default=""), + "code": fields.String(required=True, default=""), }, ) @@ -48,6 +49,15 @@ class SignUpUser(Resource): def post(self): """signs up the new users and saves data in DB""" data: Union[Any, dict] = request.json 
+ if os.environ.get("FLASK_ENV") != "testing": + if data["email_address"] not in ["test@fairhub.io"]: + invite = model.StudyInvitedContributor.query.filter_by( + email_address=data["email_address"] + ).one_or_none() + if not invite: + return "You are not validated", 403 + if invite.token != data["code"]: + return "signup code does not match", 403 def validate_is_valid_email(instance): # Turn on check_deliverability @@ -93,7 +103,7 @@ def validate_password(instance): # Schema validation schema = { "type": "object", - "required": ["email_address", "password"], + "required": ["email_address", "password", "code"], "additionalProperties": False, "properties": { "email_address": {"type": "string", "format": "valid_email"}, @@ -101,6 +111,7 @@ def validate_password(instance): "type": "string", "format": "password", }, + "code": {"type": "string"}, }, } @@ -121,6 +132,7 @@ def validate_password(instance): invitations = model.StudyInvitedContributor.query.filter_by( email_address=data["email_address"] ).all() + new_user = model.User.from_data(data) for invite in invitations: invite.study.add_user_to_study(new_user, invite.permission) diff --git a/apis/contributor.py b/apis/contributor.py index 72621e80..8208f79f 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -48,16 +48,6 @@ def post(self, study_id: int): email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] - # encoded_jwt_code = jwt.encode( - # { - # "user": user.id, - # "exp": datetime.datetime.now(timezone.utc) - # + datetime.timedelta(minutes=180), # noqa: W503 - # "jti": str(uuid.uuid4()), - # }, # noqa: W503 - # config.FAIRHUB_SECRET, - # algorithm="HS256", - # ) contributor_ = None try: diff --git a/apis/dataset.py b/apis/dataset.py index b334cc29..adc97c14 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -188,17 +188,32 @@ def post(self, study_id: int, dataset_id: int): return dataset_versions.to_dict() 
-@api.route("/study//dataset//version//publish") -class PublishResource(Resource): - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version publish") - def post(self, study_id: int, dataset_id: int): - study = model.Study.query.get(study_id) - if not is_granted("publish_version", study): - return "Access denied, you can not modify", 403 - data_obj = model.Dataset.query.get(dataset_id) - data: typing.Union[typing.Any, dict] = request.json - dataset_versions = model.Version.from_data(data_obj, data) - model.db.session.commit() - return dataset_versions.to_dict() +# @api.route("/study//dataset//version//publish") +# class PublishResource(Resource): +# @api.response(201, "Success") +# @api.response(400, "Validation Error") +# @api.doc("version publish") +# def post(self, study_id: int, dataset_id: int, version_id: int): +# study = model.Study.query.get(study_id) +# if not is_granted("publish_version", study): +# return "Access denied, you can not modify", 403 +# data_obj = model.Version.query.get(version_id) +# data: typing.Union[typing.Any, dict] = request.json +# dataset_versions = model.Version.from_data(data_obj, data) +# model.db.session.commit() +# return dataset_versions.to_dict() + + +# +# @api.route("/study//dataset/ +# /version//dataset-metadata") +# class VersionStudyMetadataResource(Resource): +# @api.response(201, "Success") +# @api.response(400, "Validation Error") +# @api.doc("version dataset metadata get") +# def get(self, study_id: int, dataset_id: int, version_id): +# study = model.Study.query.get(study_id) +# if not is_granted("dataset", study): +# return "Access denied, you can not modify", 403 +# version = dataset.dataset_version.get(version_id) +# pass diff --git a/config.py b/config.py index f1d0304b..1c8e35cb 100644 --- a/config.py +++ b/config.py @@ -1,5 +1,9 @@ """Configuration for the application.""" from os import environ +from dotenv import load_dotenv + +# Load environment variables from .env 
+load_dotenv(".env") FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index 2bd8edab..dd4279e9 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,6 +1,7 @@ import datetime import uuid +import random from .db import db from .study import Study @@ -15,13 +16,13 @@ def __init__(self, study: Study, email_address: str, permission): self.invited_on = datetime.datetime.now(datetime.timezone.utc).timestamp() self.email_address = email_address self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() - self.token = "" + self.token = random.randint(10 ** (7 - 1), (10**7) - 1) __tablename__ = "invited_study_contributor" email_address = db.Column(db.String, nullable=False, primary_key=True) permission = db.Column(db.String, nullable=False) invited_on = db.Column(db.BigInteger, nullable=False) - token = db.Column(db.String, nullable=True) + token = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column( diff --git a/poetry.lock b/poetry.lock index 5b50c575..6e4a0d47 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aniso8601" version = "9.0.1" description = "A library for parsing ISO 8601 strings." 
-category = "main" optional = false python-versions = "*" files = [ @@ -19,7 +18,6 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -41,7 +39,6 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = "*" files = [ @@ -53,7 +50,6 @@ files = [ name = "argon2-cffi" version = "21.3.0" description = "The secure Argon2 password hashing algorithm." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -73,7 +69,6 @@ tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -111,7 +106,6 @@ tests = ["pytest"] name = "arrow" version = "1.2.3" description = "Better dates & times for Python" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -126,7 +120,6 @@ python-dateutil = ">=2.7.0" name = "art" version = "6.0" description = "ASCII Art Library For Python" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -141,7 +134,6 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture name = "astroid" version = "2.15.6" description = "An abstract syntax tree for Python with inference support." 
-category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -161,7 +153,6 @@ wrapt = [ name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -179,7 +170,6 @@ test = ["astroid", "pytest"] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -194,7 +184,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -213,7 +202,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -228,7 +216,6 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" -category = "dev" optional = false python-versions = "*" files = [ @@ -240,7 +227,6 @@ files = [ name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -275,7 +261,6 @@ typecheck = ["mypy"] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" -category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -294,7 +279,6 @@ lxml = ["lxml"] name = "black" version = "23.7.0" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -341,7 +325,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -360,7 +343,6 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "blinker" version = "1.6.2" description = "Fast, simple object-to-object and broadcast signaling" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -372,7 +354,6 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -384,7 +365,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "dev" optional = false python-versions = "*" files = [ @@ -461,7 +441,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -546,7 +525,6 @@ files = [ name = "click" version = "8.1.6" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -561,7 +539,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -573,7 +550,6 @@ files = [ name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -593,7 +569,6 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -669,7 +644,6 @@ toml = ["tomli"] name = "coveragespace" version = "6.0.2" description = "A place to track your code coverage metrics." -category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -684,11 +658,55 @@ docopt = ">=0.6" minilog = ">=2.0" requests = ">=2.28,<3.0" +[[package]] +name = "cryptography" +version = "41.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, + {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, + {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", 
hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, + {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -716,7 +734,6 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -728,7 +745,6 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -740,7 +756,6 @@ files = [ name = "dicttoxml" version = "1.7.16" description = "Converts a Python dictionary or other native data type into a valid XML string." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -752,7 +767,6 @@ files = [ name = "dill" version = "0.3.7" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -767,7 +781,6 @@ graph = ["objgraph (>=1.7.2)"] name = "dnspython" version = "2.4.2" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -787,7 +800,6 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" -category = "dev" optional = false python-versions = "*" files = [ @@ -798,7 +810,6 @@ files = [ name = "email-validator" version = "2.0.0.post2" description = "A robust email address syntax and deliverability validation library." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -814,7 +825,6 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -829,7 +839,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = "*" files = [ @@ -844,7 +853,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "faker" version = "18.13.0" description = "Faker is a Python package that generates fake data for you." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -859,7 +867,6 @@ python-dateutil = ">=2.4" name = "fastjsonschema" version = "2.18.0" description = "Fastest Python implementation of JSON schema" -category = "dev" optional = false python-versions = "*" files = [ @@ -874,7 +881,6 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -891,7 +897,6 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.2" description = "A simple framework for building complex web applications." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -915,7 +920,6 @@ dotenv = ["python-dotenv"] name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." -category = "main" optional = false python-versions = "*" files = [ @@ -931,7 +935,6 @@ Flask = "*" name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" -category = "main" optional = false python-versions = "*" files = [ @@ -946,7 +949,6 @@ Flask = ">=0.9" name = "flask-restx" version = "1.1.0" description = "Fully featured framework for fast, easy and documented API development with Flask" -category = "main" optional = false python-versions = "*" files = [ @@ -970,7 +972,6 @@ test = ["Faker (==2.0.0)", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pyt name = "flask-sqlalchemy" version = "3.0.5" description = "Add SQLAlchemy support to your Flask application." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -986,7 +987,6 @@ sqlalchemy = ">=1.4.18" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -998,7 +998,6 @@ files = [ name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1013,7 +1012,6 @@ python-dateutil = ">=2.7" name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." -category = "dev" optional = false python-versions = "*" files = [ @@ -1031,7 +1029,6 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1040,7 +1037,6 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = 
"greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -1049,7 +1045,6 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -1079,7 +1074,6 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -1088,7 +1082,6 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -1109,7 +1102,6 @@ test = ["objgraph", "psutil"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1121,7 +1113,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1141,7 +1132,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = 
"importlib-resources" version = "6.0.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1160,7 +1150,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1172,7 +1161,6 @@ files = [ name = "ipykernel" version = "6.25.0" description = "IPython Kernel for Jupyter" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1186,7 +1174,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1206,7 +1194,6 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.12.2" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1246,7 +1233,6 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" -category = "dev" optional = false python-versions = "*" files = [ @@ -1258,7 +1244,6 @@ files = [ name = "ipywidgets" version = "8.1.0" description = "Jupyter interactive widgets" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1280,7 +1265,6 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1295,7 +1279,6 @@ arrow = ">=0.15.0" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." 
-category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1313,7 +1296,6 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1325,7 +1307,6 @@ files = [ name = "jedi" version = "0.19.0" description = "An autocompletion tool for Python that can be used for text editors." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1345,7 +1326,6 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1363,7 +1343,6 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." -category = "dev" optional = false python-versions = "*" files = [ @@ -1378,7 +1357,6 @@ dev = ["hypothesis"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1388,14 +1366,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.18.4" +version = "4.19.1" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.18.4-py3-none-any.whl", hash = "sha256:971be834317c22daaa9132340a51c01b50910724082c2c1a2ac87eeec153a3fe"}, - {file = "jsonschema-4.18.4.tar.gz", hash = "sha256:fb3642735399fa958c0d2aad7057901554596c63349f4f6b283c493cf692a25d"}, + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = 
"sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, ] [package.dependencies] @@ -1422,7 +1399,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1438,7 +1414,6 @@ referencing = ">=0.28.0" name = "jupyter" version = "1.0.0" description = "Jupyter metapackage. Install all the Jupyter components in one go." -category = "dev" optional = false python-versions = "*" files = [ @@ -1459,7 +1434,6 @@ qtconsole = "*" name = "jupyter-client" version = "8.3.0" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1469,7 +1443,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1483,7 +1457,6 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-console" version = "6.6.3" description = "Jupyter terminal console" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1495,7 +1468,7 @@ files = [ ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" prompt-toolkit = ">=3.0.30" pygments = "*" pyzmq = ">=17" @@ -1508,7 +1481,6 @@ test = ["flaky", "pexpect", "pytest"] name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1529,7 +1501,6 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "jupyter-events" version = "0.7.0" description = "Jupyter Event System library" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1555,7 +1526,6 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p name = "jupyter-lsp" version = "2.2.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1571,7 +1541,6 @@ jupyter-server = ">=1.1.2" name = "jupyter-server" version = "2.7.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1584,7 +1553,7 @@ anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" jupyter-events = ">=0.6.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1608,7 +1577,6 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-terminals" version = "0.4.4" description = "A Jupyter Server Extension Providing Terminals." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1628,7 +1596,6 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", name = "jupyterlab" version = "4.0.3" description = "JupyterLab computational environment" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1662,7 +1629,6 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1674,7 +1640,6 @@ files = [ name = "jupyterlab-server" version = "2.24.0" description = "A set of server components for JupyterLab and JupyterLab like applications." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1701,7 +1666,6 @@ test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-valida name = "jupyterlab-widgets" version = "3.0.8" description = "Jupyter interactive widgets for JupyterLab" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1709,11 +1673,23 @@ files = [ {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, ] +[[package]] +name = "jwt" +version = "1.3.1" +description = "JSON Web Token library for Python 3." +optional = false +python-versions = ">= 3.6" +files = [ + {file = "jwt-1.3.1-py3-none-any.whl", hash = "sha256:61c9170f92e736b530655e75374681d4fcca9cfa8763ab42be57353b2b203494"}, +] + +[package.dependencies] +cryptography = ">=3.1,<3.4.0 || >3.4.0" + [[package]] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1759,7 +1735,6 @@ files = [ name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1777,7 +1752,6 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1801,16 +1775,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1847,7 +1811,6 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1862,7 +1825,6 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1874,7 +1836,6 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1886,7 +1847,6 @@ files = [ name = "minilog" version = "2.2" description = "Minimalistic wrapper for Python logging." -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1898,7 +1858,6 @@ files = [ name = "mistune" version = "3.0.1" description = "A sane and fast Markdown parser with useful plugins and renderers" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1910,7 +1869,6 @@ files = [ name = "mkdocs" version = "1.3.1" description = "Project documentation with Markdown." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1937,7 +1895,6 @@ i18n = ["babel (>=2.9.0)"] name = "mypy" version = "1.4.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1984,7 +1941,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1996,7 +1952,6 @@ files = [ name = "nbclient" version = "0.8.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -2006,7 +1961,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -2019,7 +1974,6 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= name = "nbconvert" version = "7.7.3" description = "Converting Jupyter Notebooks" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2058,7 +2012,6 @@ webpdf = ["playwright"] name = "nbformat" version = "5.9.2" description = "The Jupyter Notebook format" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2080,7 +2033,6 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.7" description = "Patch asyncio to allow nested event loops" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2092,7 +2044,6 @@ files = [ name = "notebook" version = "7.0.1" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2117,7 +2068,6 @@ test = ["ipykernel", "jupyter-server[test] (>=2.4.0,<3)", 
"jupyterlab-server[tes name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2135,7 +2085,6 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2147,7 +2096,6 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2159,7 +2107,6 @@ files = [ name = "pandocfilters" version = "1.5.0" description = "Utilities for writing pandoc filters in python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2171,7 +2118,6 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2187,7 +2133,6 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2199,7 +2144,6 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2211,7 +2155,6 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -2226,7 +2169,6 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" optional = false python-versions = "*" files = [ @@ -2238,7 +2180,6 @@ files = [ name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2250,7 +2191,6 @@ files = [ name = "platformdirs" version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2266,7 +2206,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2282,7 +2221,6 @@ testing = ["pytest", "pytest-benchmark"] name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2301,7 +2239,6 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2316,7 +2253,6 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2331,7 +2267,6 @@ wcwidth = "*" name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2358,7 +2293,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2381,7 +2315,6 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -2393,7 +2326,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -2408,7 +2340,6 @@ tests = ["pytest"] name = "pycodestyle" version = "2.11.0" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2420,7 +2351,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2432,7 +2362,6 @@ files = [ name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2450,7 +2379,6 @@ toml = ["tomli (>=1.2.3)"] name = "pyfairdatatools" version = "0.1.3" description = "Tools for AI-READI" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2473,7 +2401,6 @@ validators = ">=0.20.0,<0.21.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2485,7 +2412,6 @@ files = [ name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2500,7 +2426,6 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2518,7 +2443,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.5" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2548,7 +2472,6 @@ testutils = ["gitpython (>3)"] name = "pymdown-extensions" version = "10.1" description = "Extension pack for Python Markdown." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2564,7 +2487,6 @@ pyyaml = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2587,7 +2509,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2606,7 +2527,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-describe" version = "2.1.0" description = "Describe-style plugin for pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2621,7 +2541,6 @@ pytest = ">=4.6,<8" name = "pytest-expecter" version = "3.0" description = "Better testing with expecter and pytest." 
-category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2633,7 +2552,6 @@ files = [ name = "pytest-random" version = "0.02" description = "py.test plugin to randomize tests" -category = "dev" optional = false python-versions = "*" files = [ @@ -2647,7 +2565,6 @@ pytest = ">=2.2.3" name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2662,7 +2579,6 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2677,7 +2593,6 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2689,7 +2604,6 @@ files = [ name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2701,7 +2615,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "dev" optional = false python-versions = "*" files = [ @@ -2725,7 +2638,6 @@ files = [ name = "pywinpty" version = "2.0.11" description = "Pseudo terminal support for Windows from Python." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2740,7 +2652,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2800,7 +2711,6 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. 
" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2815,7 +2725,6 @@ pyyaml = "*" name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2905,7 +2814,6 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qtconsole" version = "5.4.3" description = "Jupyter Qt console" -category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2932,7 +2840,6 @@ test = ["flaky", "pytest", "pytest-qt"] name = "qtpy" version = "2.3.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2950,7 +2857,6 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] name = "referencing" version = "0.30.0" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2966,7 +2872,6 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2988,7 +2893,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3003,7 +2907,6 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3015,7 +2918,6 @@ files = [ name = "rpds-py" version = "0.9.2" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3122,7 +3024,6 @@ files = [ name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3139,7 +3040,6 @@ win32 = ["pywin32"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3151,7 +3051,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3163,7 +3062,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" files = [ @@ -3175,7 +3073,6 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3187,7 +3084,6 @@ files = [ name = "sqlalchemy" version = "2.0.19" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3266,7 +3162,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" files = [ @@ -3286,7 +3181,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "terminado" version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3307,7 +3201,6 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3326,7 +3219,6 @@ test = ["flake8", "isort", "pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3338,7 +3230,6 @@ files = [ name = "tomlkit" version = "0.12.1" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3350,7 +3241,6 @@ files = [ name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -3371,7 +3261,6 @@ files = [ name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3387,7 +3276,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-requests" version = "2.31.0.2" description = "Typing stubs for requests" -category = "main" optional = false python-versions = "*" files = [ @@ -3402,7 +3290,6 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" -category = "main" optional = false python-versions = "*" files = [ @@ -3414,7 +3301,6 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3426,7 +3312,6 @@ files = [ name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3441,7 +3326,6 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3458,7 +3342,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." 
-category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3475,7 +3358,6 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3515,7 +3397,6 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -3527,7 +3408,6 @@ files = [ name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3543,7 +3423,6 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "dev" optional = false python-versions = "*" files = [ @@ -3555,7 +3434,6 @@ files = [ name = "websocket-client" version = "1.6.1" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3572,7 +3450,6 @@ test = ["websockets"] name = "werkzeug" version = "2.3.6" description = "The comprehensive WSGI web application library." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3590,7 +3467,6 @@ watchdog = ["watchdog (>=2.3)"] name = "widgetsnbextension" version = "4.0.8" description = "Jupyter interactive widgets for Jupyter Notebook" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3602,7 +3478,6 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3687,7 +3562,6 @@ files = [ name = "zipp" version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3702,4 +3576,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "dd13076e47cb3d15d794c986e0cb6d54a72f36accdf6a5441557da63171cfbb1" +content-hash = "266d48f06cb7dac297184d0390f8d4e3404d9f60a2df4a68a4ff81adddf35d5f" diff --git a/tests/conftest.py b/tests/conftest.py index 88f31b11..32c2f8e7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -66,6 +66,7 @@ def _create_user(_test_client): json={ "email_address": "sample@gmail.com", "password": "Testingyeshello11!", + "code": "7654321", }, ) From a966c715c7b87a02dde4cb809d04abadd26d4f29 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 26 Oct 2023 15:58:16 -0700 Subject: [PATCH 318/505] =?UTF-8?q?=F0=9F=A7=90=20wip:=20test=20blob=20acc?= =?UTF-8?q?ess?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- notebooks/azure-blob.ipynb | 153 +++++++++++++++++++++++++++++++++++++ 1 file changed, 153 insertions(+) create mode 100644 notebooks/azure-blob.ipynb diff --git a/notebooks/azure-blob.ipynb b/notebooks/azure-blob.ipynb new file mode 100644 index 00000000..99174582 --- /dev/null +++ b/notebooks/azure-blob.ipynb @@ -0,0 +1,153 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "from os import environ\n", + "from dotenv import load_dotenv\n", + "import datetime\n", + "import requests\n", + "import base64\n", + "import hmac\n", + "import hashlib\n", + "\n", + "\n", + "# Load environment variables from .env\n", + "load_dotenv(\".env\")\n", + "\n", + 
"AZURE_STORAGE_ACCOUNT_NAME = environ.get(\"AZURE_STORAGE_ACCOUNT_NAME\")\n", + "AZURE_ACCESS_KEY = environ.get(\"AZURE_ACCESS_KEY\")" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": {}, + "outputs": [ + { + "ename": "ConnectionError", + "evalue": "HTTPSConnectionPool(host=\"'b2aiuploadtest.blob.core.windows.net\", port=443): Max retries exceeded with url: /?resource=account (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed'))", + "output_type": "error", + "traceback": [ + "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[1;31mgaierror\u001b[0m Traceback (most recent call last)", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connection.py:174\u001b[0m, in \u001b[0;36mHTTPConnection._new_conn\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 173\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> 174\u001b[0m conn \u001b[39m=\u001b[39m connection\u001b[39m.\u001b[39;49mcreate_connection(\n\u001b[0;32m 175\u001b[0m (\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_dns_host, \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mport), \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtimeout, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mextra_kw\n\u001b[0;32m 176\u001b[0m )\n\u001b[0;32m 178\u001b[0m \u001b[39mexcept\u001b[39;00m SocketTimeout:\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\util\\connection.py:72\u001b[0m, in \u001b[0;36mcreate_connection\u001b[1;34m(address, timeout, source_address, socket_options)\u001b[0m\n\u001b[0;32m 68\u001b[0m \u001b[39mreturn\u001b[39;00m six\u001b[39m.\u001b[39mraise_from(\n\u001b[0;32m 69\u001b[0m LocationParseError(\u001b[39mu\u001b[39m\u001b[39m\"\u001b[39m\u001b[39m'\u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m, label empty or too 
long\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m%\u001b[39m host), \u001b[39mNone\u001b[39;00m\n\u001b[0;32m 70\u001b[0m )\n\u001b[1;32m---> 72\u001b[0m \u001b[39mfor\u001b[39;00m res \u001b[39min\u001b[39;00m socket\u001b[39m.\u001b[39;49mgetaddrinfo(host, port, family, socket\u001b[39m.\u001b[39;49mSOCK_STREAM):\n\u001b[0;32m 73\u001b[0m af, socktype, proto, canonname, sa \u001b[39m=\u001b[39m res\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\socket.py:918\u001b[0m, in \u001b[0;36mgetaddrinfo\u001b[1;34m(host, port, family, type, proto, flags)\u001b[0m\n\u001b[0;32m 917\u001b[0m addrlist \u001b[39m=\u001b[39m []\n\u001b[1;32m--> 918\u001b[0m \u001b[39mfor\u001b[39;00m res \u001b[39min\u001b[39;00m _socket\u001b[39m.\u001b[39;49mgetaddrinfo(host, port, family, \u001b[39mtype\u001b[39;49m, proto, flags):\n\u001b[0;32m 919\u001b[0m af, socktype, proto, canonname, sa \u001b[39m=\u001b[39m res\n", + "\u001b[1;31mgaierror\u001b[0m: [Errno 11001] getaddrinfo failed", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[1;31mNewConnectionError\u001b[0m Traceback (most recent call last)", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:714\u001b[0m, in \u001b[0;36mHTTPConnectionPool.urlopen\u001b[1;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[0;32m 713\u001b[0m \u001b[39m# Make the request on the httplib connection object.\u001b[39;00m\n\u001b[1;32m--> 714\u001b[0m httplib_response \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_make_request(\n\u001b[0;32m 715\u001b[0m conn,\n\u001b[0;32m 716\u001b[0m method,\n\u001b[0;32m 717\u001b[0m url,\n\u001b[0;32m 718\u001b[0m timeout\u001b[39m=\u001b[39;49mtimeout_obj,\n\u001b[0;32m 719\u001b[0m body\u001b[39m=\u001b[39;49mbody,\n\u001b[0;32m 
720\u001b[0m headers\u001b[39m=\u001b[39;49mheaders,\n\u001b[0;32m 721\u001b[0m chunked\u001b[39m=\u001b[39;49mchunked,\n\u001b[0;32m 722\u001b[0m )\n\u001b[0;32m 724\u001b[0m \u001b[39m# If we're going to release the connection in ``finally:``, then\u001b[39;00m\n\u001b[0;32m 725\u001b[0m \u001b[39m# the response doesn't need to know about the connection. Otherwise\u001b[39;00m\n\u001b[0;32m 726\u001b[0m \u001b[39m# it will also try to release it and we'll have a double-release\u001b[39;00m\n\u001b[0;32m 727\u001b[0m \u001b[39m# mess.\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:403\u001b[0m, in \u001b[0;36mHTTPConnectionPool._make_request\u001b[1;34m(self, conn, method, url, timeout, chunked, **httplib_request_kw)\u001b[0m\n\u001b[0;32m 402\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> 403\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_validate_conn(conn)\n\u001b[0;32m 404\u001b[0m \u001b[39mexcept\u001b[39;00m (SocketTimeout, BaseSSLError) \u001b[39mas\u001b[39;00m e:\n\u001b[0;32m 405\u001b[0m \u001b[39m# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.\u001b[39;00m\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:1053\u001b[0m, in \u001b[0;36mHTTPSConnectionPool._validate_conn\u001b[1;34m(self, conn)\u001b[0m\n\u001b[0;32m 1052\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mgetattr\u001b[39m(conn, \u001b[39m\"\u001b[39m\u001b[39msock\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39mNone\u001b[39;00m): \u001b[39m# AppEngine might not have `.sock`\u001b[39;00m\n\u001b[1;32m-> 1053\u001b[0m conn\u001b[39m.\u001b[39;49mconnect()\n\u001b[0;32m 1055\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m conn\u001b[39m.\u001b[39mis_verified:\n", + "File 
\u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connection.py:363\u001b[0m, in \u001b[0;36mHTTPSConnection.connect\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 361\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mconnect\u001b[39m(\u001b[39mself\u001b[39m):\n\u001b[0;32m 362\u001b[0m \u001b[39m# Add certificate verification\u001b[39;00m\n\u001b[1;32m--> 363\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39msock \u001b[39m=\u001b[39m conn \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_new_conn()\n\u001b[0;32m 364\u001b[0m hostname \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mhost\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connection.py:186\u001b[0m, in \u001b[0;36mHTTPConnection._new_conn\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 185\u001b[0m \u001b[39mexcept\u001b[39;00m SocketError \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m--> 186\u001b[0m \u001b[39mraise\u001b[39;00m NewConnectionError(\n\u001b[0;32m 187\u001b[0m \u001b[39mself\u001b[39m, \u001b[39m\"\u001b[39m\u001b[39mFailed to establish a new connection: \u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m\"\u001b[39m \u001b[39m%\u001b[39m e\n\u001b[0;32m 188\u001b[0m )\n\u001b[0;32m 190\u001b[0m \u001b[39mreturn\u001b[39;00m conn\n", + "\u001b[1;31mNewConnectionError\u001b[0m: : Failed to establish a new connection: [Errno 11001] getaddrinfo failed", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[1;31mMaxRetryError\u001b[0m Traceback (most recent call last)", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\adapters.py:486\u001b[0m, in \u001b[0;36mHTTPAdapter.send\u001b[1;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[0;32m 485\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> 486\u001b[0m resp \u001b[39m=\u001b[39m 
conn\u001b[39m.\u001b[39;49murlopen(\n\u001b[0;32m 487\u001b[0m method\u001b[39m=\u001b[39;49mrequest\u001b[39m.\u001b[39;49mmethod,\n\u001b[0;32m 488\u001b[0m url\u001b[39m=\u001b[39;49murl,\n\u001b[0;32m 489\u001b[0m body\u001b[39m=\u001b[39;49mrequest\u001b[39m.\u001b[39;49mbody,\n\u001b[0;32m 490\u001b[0m headers\u001b[39m=\u001b[39;49mrequest\u001b[39m.\u001b[39;49mheaders,\n\u001b[0;32m 491\u001b[0m redirect\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 492\u001b[0m assert_same_host\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 493\u001b[0m preload_content\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 494\u001b[0m decode_content\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 495\u001b[0m retries\u001b[39m=\u001b[39;49m\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mmax_retries,\n\u001b[0;32m 496\u001b[0m timeout\u001b[39m=\u001b[39;49mtimeout,\n\u001b[0;32m 497\u001b[0m chunked\u001b[39m=\u001b[39;49mchunked,\n\u001b[0;32m 498\u001b[0m )\n\u001b[0;32m 500\u001b[0m \u001b[39mexcept\u001b[39;00m (ProtocolError, \u001b[39mOSError\u001b[39;00m) \u001b[39mas\u001b[39;00m err:\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:798\u001b[0m, in \u001b[0;36mHTTPConnectionPool.urlopen\u001b[1;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[0;32m 796\u001b[0m e \u001b[39m=\u001b[39m ProtocolError(\u001b[39m\"\u001b[39m\u001b[39mConnection aborted.\u001b[39m\u001b[39m\"\u001b[39m, e)\n\u001b[1;32m--> 798\u001b[0m retries \u001b[39m=\u001b[39m retries\u001b[39m.\u001b[39;49mincrement(\n\u001b[0;32m 799\u001b[0m method, url, error\u001b[39m=\u001b[39;49me, _pool\u001b[39m=\u001b[39;49m\u001b[39mself\u001b[39;49m, 
_stacktrace\u001b[39m=\u001b[39;49msys\u001b[39m.\u001b[39;49mexc_info()[\u001b[39m2\u001b[39;49m]\n\u001b[0;32m 800\u001b[0m )\n\u001b[0;32m 801\u001b[0m retries\u001b[39m.\u001b[39msleep()\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\util\\retry.py:592\u001b[0m, in \u001b[0;36mRetry.increment\u001b[1;34m(self, method, url, response, error, _pool, _stacktrace)\u001b[0m\n\u001b[0;32m 591\u001b[0m \u001b[39mif\u001b[39;00m new_retry\u001b[39m.\u001b[39mis_exhausted():\n\u001b[1;32m--> 592\u001b[0m \u001b[39mraise\u001b[39;00m MaxRetryError(_pool, url, error \u001b[39mor\u001b[39;00m ResponseError(cause))\n\u001b[0;32m 594\u001b[0m log\u001b[39m.\u001b[39mdebug(\u001b[39m\"\u001b[39m\u001b[39mIncremented Retry for (url=\u001b[39m\u001b[39m'\u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m): \u001b[39m\u001b[39m%r\u001b[39;00m\u001b[39m\"\u001b[39m, url, new_retry)\n", + "\u001b[1;31mMaxRetryError\u001b[0m: HTTPSConnectionPool(host=\"'b2aiuploadtest.blob.core.windows.net\", port=443): Max retries exceeded with url: /?resource=account (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed'))", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[1;31mConnectionError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32mc:\\Users\\dev\\Desktop\\api.fairhub.io\\notebooks\\azure-blob.ipynb Cell 2\u001b[0m line \u001b[0;36m6\n\u001b[0;32m 53\u001b[0m headers \u001b[39m=\u001b[39m {\n\u001b[0;32m 54\u001b[0m \u001b[39m'\u001b[39m\u001b[39mx-ms-date\u001b[39m\u001b[39m'\u001b[39m : request_time,\n\u001b[0;32m 55\u001b[0m \u001b[39m'\u001b[39m\u001b[39mx-ms-version\u001b[39m\u001b[39m'\u001b[39m : api_version,\n\u001b[0;32m 56\u001b[0m \u001b[39m'\u001b[39m\u001b[39mContent-Length\u001b[39m\u001b[39m'\u001b[39m: \u001b[39m\"\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[0;32m 57\u001b[0m 
\u001b[39m'\u001b[39m\u001b[39mAuthorization\u001b[39m\u001b[39m'\u001b[39m : (\u001b[39m'\u001b[39m\u001b[39mSharedKey \u001b[39m\u001b[39m'\u001b[39m \u001b[39m+\u001b[39m storage_account_name \u001b[39m+\u001b[39m \u001b[39m'\u001b[39m\u001b[39m:\u001b[39m\u001b[39m'\u001b[39m \u001b[39m+\u001b[39m signed_string)\n\u001b[0;32m 58\u001b[0m }\n\u001b[0;32m 60\u001b[0m url \u001b[39m=\u001b[39m (\u001b[39m'\u001b[39m\u001b[39mhttps://\u001b[39m\u001b[39m'\u001b[39m \u001b[39m+\u001b[39m storage_account_name \u001b[39m+\u001b[39m \u001b[39m'\u001b[39m\u001b[39m.blob.core.windows.net/?resource=account\u001b[39m\u001b[39m'\u001b[39m)\n\u001b[1;32m---> 62\u001b[0m response \u001b[39m=\u001b[39m requests\u001b[39m.\u001b[39;49mget(url, headers\u001b[39m=\u001b[39;49mheaders)\n\u001b[0;32m 63\u001b[0m \u001b[39mprint\u001b[39m(response\u001b[39m.\u001b[39mtext)\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\api.py:73\u001b[0m, in \u001b[0;36mget\u001b[1;34m(url, params, **kwargs)\u001b[0m\n\u001b[0;32m 62\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mget\u001b[39m(url, params\u001b[39m=\u001b[39m\u001b[39mNone\u001b[39;00m, \u001b[39m*\u001b[39m\u001b[39m*\u001b[39mkwargs):\n\u001b[0;32m 63\u001b[0m \u001b[39m \u001b[39m\u001b[39mr\u001b[39m\u001b[39m\"\"\"Sends a GET request.\u001b[39;00m\n\u001b[0;32m 64\u001b[0m \n\u001b[0;32m 65\u001b[0m \u001b[39m :param url: URL for the new :class:`Request` object.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 70\u001b[0m \u001b[39m :rtype: requests.Response\u001b[39;00m\n\u001b[0;32m 71\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[1;32m---> 73\u001b[0m \u001b[39mreturn\u001b[39;00m request(\u001b[39m\"\u001b[39;49m\u001b[39mget\u001b[39;49m\u001b[39m\"\u001b[39;49m, url, params\u001b[39m=\u001b[39;49mparams, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n", + "File 
\u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\api.py:59\u001b[0m, in \u001b[0;36mrequest\u001b[1;34m(method, url, **kwargs)\u001b[0m\n\u001b[0;32m 55\u001b[0m \u001b[39m# By using the 'with' statement we are sure the session is closed, thus we\u001b[39;00m\n\u001b[0;32m 56\u001b[0m \u001b[39m# avoid leaving sockets open which can trigger a ResourceWarning in some\u001b[39;00m\n\u001b[0;32m 57\u001b[0m \u001b[39m# cases, and look like a memory leak in others.\u001b[39;00m\n\u001b[0;32m 58\u001b[0m \u001b[39mwith\u001b[39;00m sessions\u001b[39m.\u001b[39mSession() \u001b[39mas\u001b[39;00m session:\n\u001b[1;32m---> 59\u001b[0m \u001b[39mreturn\u001b[39;00m session\u001b[39m.\u001b[39;49mrequest(method\u001b[39m=\u001b[39;49mmethod, url\u001b[39m=\u001b[39;49murl, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\sessions.py:589\u001b[0m, in \u001b[0;36mSession.request\u001b[1;34m(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\u001b[0m\n\u001b[0;32m 584\u001b[0m send_kwargs \u001b[39m=\u001b[39m {\n\u001b[0;32m 585\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mtimeout\u001b[39m\u001b[39m\"\u001b[39m: timeout,\n\u001b[0;32m 586\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mallow_redirects\u001b[39m\u001b[39m\"\u001b[39m: allow_redirects,\n\u001b[0;32m 587\u001b[0m }\n\u001b[0;32m 588\u001b[0m send_kwargs\u001b[39m.\u001b[39mupdate(settings)\n\u001b[1;32m--> 589\u001b[0m resp \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49msend(prep, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49msend_kwargs)\n\u001b[0;32m 591\u001b[0m \u001b[39mreturn\u001b[39;00m resp\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\sessions.py:703\u001b[0m, in 
\u001b[0;36mSession.send\u001b[1;34m(self, request, **kwargs)\u001b[0m\n\u001b[0;32m 700\u001b[0m start \u001b[39m=\u001b[39m preferred_clock()\n\u001b[0;32m 702\u001b[0m \u001b[39m# Send the request\u001b[39;00m\n\u001b[1;32m--> 703\u001b[0m r \u001b[39m=\u001b[39m adapter\u001b[39m.\u001b[39;49msend(request, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[0;32m 705\u001b[0m \u001b[39m# Total elapsed time of the request (approximately)\u001b[39;00m\n\u001b[0;32m 706\u001b[0m elapsed \u001b[39m=\u001b[39m preferred_clock() \u001b[39m-\u001b[39m start\n", + "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\adapters.py:519\u001b[0m, in \u001b[0;36mHTTPAdapter.send\u001b[1;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[0;32m 515\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39misinstance\u001b[39m(e\u001b[39m.\u001b[39mreason, _SSLError):\n\u001b[0;32m 516\u001b[0m \u001b[39m# This branch is for urllib3 v1.22 and later.\u001b[39;00m\n\u001b[0;32m 517\u001b[0m \u001b[39mraise\u001b[39;00m SSLError(e, request\u001b[39m=\u001b[39mrequest)\n\u001b[1;32m--> 519\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mConnectionError\u001b[39;00m(e, request\u001b[39m=\u001b[39mrequest)\n\u001b[0;32m 521\u001b[0m \u001b[39mexcept\u001b[39;00m ClosedPoolError \u001b[39mas\u001b[39;00m e:\n\u001b[0;32m 522\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mConnectionError\u001b[39;00m(e, request\u001b[39m=\u001b[39mrequest)\n", + "\u001b[1;31mConnectionError\u001b[0m: HTTPSConnectionPool(host=\"'b2aiuploadtest.blob.core.windows.net\", port=443): Max retries exceeded with url: /?resource=account (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed'))" + ] + } + ], + "source": [ + "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", + "storage_account_key = AZURE_ACCESS_KEY\n", + "api_version = '2018-03-28'\n", + "request_time = 
datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')\n", + "\n", + "string_params = {\n", + " 'verb': 'GET',\n", + " 'Content-Encoding': '',\n", + " 'Content-Language': '',\n", + " 'Content-Length': \"\",\n", + " 'Content-MD5': '',\n", + " 'Content-Type': '',\n", + " 'Date': '',\n", + " 'If-Modified-Since': '',\n", + " 'If-Match': '',\n", + " 'If-None-Match': '',\n", + " 'If-Unmodified-Since': '',\n", + " 'Range': '',\n", + " 'CanonicalizedHeaders': 'x-ms-date:' + request_time + '\\nx-ms-version:' + api_version + '\\n',\n", + " 'CanonicalizedResource': '/' + storage_account_name + '/\\ncomp:properties\\nrestype:service'\n", + "}\n", + "\n", + "string_to_sign = (string_params['verb'] + '\\n'\n", + " + string_params['Content-Encoding'] + '\\n'\n", + " + string_params['Content-Language'] + '\\n'\n", + " + string_params['Content-Length'] + '\\n'\n", + " + string_params['Content-MD5'] + '\\n'\n", + " + string_params['Content-Type'] + '\\n'\n", + " + string_params['Date'] + '\\n'\n", + " + string_params['If-Modified-Since'] + '\\n'\n", + " + string_params['If-Match'] + '\\n'\n", + " + string_params['If-None-Match'] + '\\n'\n", + " + string_params['If-Unmodified-Since'] + '\\n'\n", + " + string_params['Range'] + '\\n'\n", + " + string_params['CanonicalizedHeaders']\n", + " + string_params['CanonicalizedResource'])\n", + "\n", + "def _sign_string(key, string_to_sign):\n", + " key = base64.b64decode(key.encode('utf-8'))\n", + " string_to_sign = string_to_sign.encode('utf-8')\n", + " signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)\n", + " digest = signed_hmac_sha256.digest()\n", + " encoded_digest = base64.b64encode(digest).decode('utf-8')\n", + " return encoded_digest\n", + "\n", + "# signed_string = base64.b64encode(hmac.new(base64.b64decode(storage_account_key), msg=string_to_sign.encode('utf-8'), digestmod=hashlib.sha256).digest()).decode('utf-8')\n", + "# print(signed_string)\n", + "\n", + "signed_string = 
_sign_string(storage_account_key, string_to_sign=string_to_sign)\n", + "\n", + "headers = {\n", + " 'x-ms-date' : request_time,\n", + " 'x-ms-version' : api_version,\n", + " 'Content-Length': \"\",\n", + " 'Authorization' : ('SharedKey ' + storage_account_name + ':' + signed_string)\n", + "}\n", + "\n", + "url = ('https://' + storage_account_name + '.blob.core.windows.net/?resource=account')\n", + "\n", + "response = requests.get(url, headers=headers)\n", + "print(response.text)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "fairhub-api-dev-env", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.17" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 26 Oct 2023 17:04:22 -0700 Subject: [PATCH 319/505] =?UTF-8?q?=E2=9A=B0=EF=B8=8F=20remove:=20remove?= =?UTF-8?q?=20migration?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...ae7169083_add_column_in_token_blacklist.py | 26 ------------------- 1 file changed, 26 deletions(-) delete mode 100644 alembic/versions/6ebae7169083_add_column_in_token_blacklist.py diff --git a/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py b/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py deleted file mode 100644 index 8f075846..00000000 --- a/alembic/versions/6ebae7169083_add_column_in_token_blacklist.py +++ /dev/null @@ -1,26 +0,0 @@ -"""add column in token_blacklist - -Revision ID: 6ebae7169083 -Revises: 6d4271d33834 -Create Date: 2023-10-09 15:48:38.553510 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = "6ebae7169083" -down_revision: Union[str, None] = "6d4271d33834" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - pass - - -def downgrade() -> None: - pass From 6b2b401ffba768e5d8c4bcc3534efe070527397e Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 27 Oct 2023 16:23:07 -0700 Subject: [PATCH 320/505] chore: remove output --- notebooks/azure-blob.ipynb | 57 ++++++++++---------------------------- 1 file changed, 14 insertions(+), 43 deletions(-) diff --git a/notebooks/azure-blob.ipynb b/notebooks/azure-blob.ipynb index 99174582..5a9750ff 100644 --- a/notebooks/azure-blob.ipynb +++ b/notebooks/azure-blob.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -18,63 +18,34 @@ "# Load environment variables from .env\n", "load_dotenv(\".env\")\n", "\n", + "if not environ.get(\"AZURE_STORAGE_ACCOUNT_NAME\"):\n", + " raise ValueError(\"AZURE_STORAGE_ACCOUNT_NAME is not set\")\n", + "\n", "AZURE_STORAGE_ACCOUNT_NAME = environ.get(\"AZURE_STORAGE_ACCOUNT_NAME\")\n", + "\n", + "if not environ.get(\"AZURE_ACCESS_KEY\"):\n", + " raise ValueError(\"AZURE_ACCESS_KEY is not set\")\n", "AZURE_ACCESS_KEY = environ.get(\"AZURE_ACCESS_KEY\")" ] }, { "cell_type": "code", - "execution_count": 38, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "ename": "ConnectionError", - "evalue": "HTTPSConnectionPool(host=\"'b2aiuploadtest.blob.core.windows.net\", port=443): Max retries exceeded with url: /?resource=account (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed'))", - "output_type": "error", - "traceback": [ - "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[1;31mgaierror\u001b[0m Traceback (most recent call last)", - "File 
\u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connection.py:174\u001b[0m, in \u001b[0;36mHTTPConnection._new_conn\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 173\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> 174\u001b[0m conn \u001b[39m=\u001b[39m connection\u001b[39m.\u001b[39;49mcreate_connection(\n\u001b[0;32m 175\u001b[0m (\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_dns_host, \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mport), \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mtimeout, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mextra_kw\n\u001b[0;32m 176\u001b[0m )\n\u001b[0;32m 178\u001b[0m \u001b[39mexcept\u001b[39;00m SocketTimeout:\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\util\\connection.py:72\u001b[0m, in \u001b[0;36mcreate_connection\u001b[1;34m(address, timeout, source_address, socket_options)\u001b[0m\n\u001b[0;32m 68\u001b[0m \u001b[39mreturn\u001b[39;00m six\u001b[39m.\u001b[39mraise_from(\n\u001b[0;32m 69\u001b[0m LocationParseError(\u001b[39mu\u001b[39m\u001b[39m\"\u001b[39m\u001b[39m'\u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m, label empty or too long\u001b[39m\u001b[39m\"\u001b[39m \u001b[39m%\u001b[39m host), \u001b[39mNone\u001b[39;00m\n\u001b[0;32m 70\u001b[0m )\n\u001b[1;32m---> 72\u001b[0m \u001b[39mfor\u001b[39;00m res \u001b[39min\u001b[39;00m socket\u001b[39m.\u001b[39;49mgetaddrinfo(host, port, family, socket\u001b[39m.\u001b[39;49mSOCK_STREAM):\n\u001b[0;32m 73\u001b[0m af, socktype, proto, canonname, sa \u001b[39m=\u001b[39m res\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\socket.py:918\u001b[0m, in \u001b[0;36mgetaddrinfo\u001b[1;34m(host, port, family, type, proto, flags)\u001b[0m\n\u001b[0;32m 917\u001b[0m addrlist \u001b[39m=\u001b[39m []\n\u001b[1;32m--> 918\u001b[0m \u001b[39mfor\u001b[39;00m res \u001b[39min\u001b[39;00m 
_socket\u001b[39m.\u001b[39;49mgetaddrinfo(host, port, family, \u001b[39mtype\u001b[39;49m, proto, flags):\n\u001b[0;32m 919\u001b[0m af, socktype, proto, canonname, sa \u001b[39m=\u001b[39m res\n", - "\u001b[1;31mgaierror\u001b[0m: [Errno 11001] getaddrinfo failed", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[1;31mNewConnectionError\u001b[0m Traceback (most recent call last)", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:714\u001b[0m, in \u001b[0;36mHTTPConnectionPool.urlopen\u001b[1;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[0;32m 713\u001b[0m \u001b[39m# Make the request on the httplib connection object.\u001b[39;00m\n\u001b[1;32m--> 714\u001b[0m httplib_response \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_make_request(\n\u001b[0;32m 715\u001b[0m conn,\n\u001b[0;32m 716\u001b[0m method,\n\u001b[0;32m 717\u001b[0m url,\n\u001b[0;32m 718\u001b[0m timeout\u001b[39m=\u001b[39;49mtimeout_obj,\n\u001b[0;32m 719\u001b[0m body\u001b[39m=\u001b[39;49mbody,\n\u001b[0;32m 720\u001b[0m headers\u001b[39m=\u001b[39;49mheaders,\n\u001b[0;32m 721\u001b[0m chunked\u001b[39m=\u001b[39;49mchunked,\n\u001b[0;32m 722\u001b[0m )\n\u001b[0;32m 724\u001b[0m \u001b[39m# If we're going to release the connection in ``finally:``, then\u001b[39;00m\n\u001b[0;32m 725\u001b[0m \u001b[39m# the response doesn't need to know about the connection. 
Otherwise\u001b[39;00m\n\u001b[0;32m 726\u001b[0m \u001b[39m# it will also try to release it and we'll have a double-release\u001b[39;00m\n\u001b[0;32m 727\u001b[0m \u001b[39m# mess.\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:403\u001b[0m, in \u001b[0;36mHTTPConnectionPool._make_request\u001b[1;34m(self, conn, method, url, timeout, chunked, **httplib_request_kw)\u001b[0m\n\u001b[0;32m 402\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> 403\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_validate_conn(conn)\n\u001b[0;32m 404\u001b[0m \u001b[39mexcept\u001b[39;00m (SocketTimeout, BaseSSLError) \u001b[39mas\u001b[39;00m e:\n\u001b[0;32m 405\u001b[0m \u001b[39m# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.\u001b[39;00m\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:1053\u001b[0m, in \u001b[0;36mHTTPSConnectionPool._validate_conn\u001b[1;34m(self, conn)\u001b[0m\n\u001b[0;32m 1052\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mgetattr\u001b[39m(conn, \u001b[39m\"\u001b[39m\u001b[39msock\u001b[39m\u001b[39m\"\u001b[39m, \u001b[39mNone\u001b[39;00m): \u001b[39m# AppEngine might not have `.sock`\u001b[39;00m\n\u001b[1;32m-> 1053\u001b[0m conn\u001b[39m.\u001b[39;49mconnect()\n\u001b[0;32m 1055\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m conn\u001b[39m.\u001b[39mis_verified:\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connection.py:363\u001b[0m, in \u001b[0;36mHTTPSConnection.connect\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 361\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mconnect\u001b[39m(\u001b[39mself\u001b[39m):\n\u001b[0;32m 362\u001b[0m \u001b[39m# Add certificate verification\u001b[39;00m\n\u001b[1;32m--> 363\u001b[0m 
\u001b[39mself\u001b[39m\u001b[39m.\u001b[39msock \u001b[39m=\u001b[39m conn \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_new_conn()\n\u001b[0;32m 364\u001b[0m hostname \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mhost\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connection.py:186\u001b[0m, in \u001b[0;36mHTTPConnection._new_conn\u001b[1;34m(self)\u001b[0m\n\u001b[0;32m 185\u001b[0m \u001b[39mexcept\u001b[39;00m SocketError \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m--> 186\u001b[0m \u001b[39mraise\u001b[39;00m NewConnectionError(\n\u001b[0;32m 187\u001b[0m \u001b[39mself\u001b[39m, \u001b[39m\"\u001b[39m\u001b[39mFailed to establish a new connection: \u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m\"\u001b[39m \u001b[39m%\u001b[39m e\n\u001b[0;32m 188\u001b[0m )\n\u001b[0;32m 190\u001b[0m \u001b[39mreturn\u001b[39;00m conn\n", - "\u001b[1;31mNewConnectionError\u001b[0m: : Failed to establish a new connection: [Errno 11001] getaddrinfo failed", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[1;31mMaxRetryError\u001b[0m Traceback (most recent call last)", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\adapters.py:486\u001b[0m, in \u001b[0;36mHTTPAdapter.send\u001b[1;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[0;32m 485\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m--> 486\u001b[0m resp \u001b[39m=\u001b[39m conn\u001b[39m.\u001b[39;49murlopen(\n\u001b[0;32m 487\u001b[0m method\u001b[39m=\u001b[39;49mrequest\u001b[39m.\u001b[39;49mmethod,\n\u001b[0;32m 488\u001b[0m url\u001b[39m=\u001b[39;49murl,\n\u001b[0;32m 489\u001b[0m body\u001b[39m=\u001b[39;49mrequest\u001b[39m.\u001b[39;49mbody,\n\u001b[0;32m 490\u001b[0m headers\u001b[39m=\u001b[39;49mrequest\u001b[39m.\u001b[39;49mheaders,\n\u001b[0;32m 491\u001b[0m 
redirect\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 492\u001b[0m assert_same_host\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 493\u001b[0m preload_content\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 494\u001b[0m decode_content\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m,\n\u001b[0;32m 495\u001b[0m retries\u001b[39m=\u001b[39;49m\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mmax_retries,\n\u001b[0;32m 496\u001b[0m timeout\u001b[39m=\u001b[39;49mtimeout,\n\u001b[0;32m 497\u001b[0m chunked\u001b[39m=\u001b[39;49mchunked,\n\u001b[0;32m 498\u001b[0m )\n\u001b[0;32m 500\u001b[0m \u001b[39mexcept\u001b[39;00m (ProtocolError, \u001b[39mOSError\u001b[39;00m) \u001b[39mas\u001b[39;00m err:\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\connectionpool.py:798\u001b[0m, in \u001b[0;36mHTTPConnectionPool.urlopen\u001b[1;34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)\u001b[0m\n\u001b[0;32m 796\u001b[0m e \u001b[39m=\u001b[39m ProtocolError(\u001b[39m\"\u001b[39m\u001b[39mConnection aborted.\u001b[39m\u001b[39m\"\u001b[39m, e)\n\u001b[1;32m--> 798\u001b[0m retries \u001b[39m=\u001b[39m retries\u001b[39m.\u001b[39;49mincrement(\n\u001b[0;32m 799\u001b[0m method, url, error\u001b[39m=\u001b[39;49me, _pool\u001b[39m=\u001b[39;49m\u001b[39mself\u001b[39;49m, _stacktrace\u001b[39m=\u001b[39;49msys\u001b[39m.\u001b[39;49mexc_info()[\u001b[39m2\u001b[39;49m]\n\u001b[0;32m 800\u001b[0m )\n\u001b[0;32m 801\u001b[0m retries\u001b[39m.\u001b[39msleep()\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\urllib3\\util\\retry.py:592\u001b[0m, in \u001b[0;36mRetry.increment\u001b[1;34m(self, method, url, response, error, _pool, _stacktrace)\u001b[0m\n\u001b[0;32m 591\u001b[0m \u001b[39mif\u001b[39;00m 
new_retry\u001b[39m.\u001b[39mis_exhausted():\n\u001b[1;32m--> 592\u001b[0m \u001b[39mraise\u001b[39;00m MaxRetryError(_pool, url, error \u001b[39mor\u001b[39;00m ResponseError(cause))\n\u001b[0;32m 594\u001b[0m log\u001b[39m.\u001b[39mdebug(\u001b[39m\"\u001b[39m\u001b[39mIncremented Retry for (url=\u001b[39m\u001b[39m'\u001b[39m\u001b[39m%s\u001b[39;00m\u001b[39m'\u001b[39m\u001b[39m): \u001b[39m\u001b[39m%r\u001b[39;00m\u001b[39m\"\u001b[39m, url, new_retry)\n", - "\u001b[1;31mMaxRetryError\u001b[0m: HTTPSConnectionPool(host=\"'b2aiuploadtest.blob.core.windows.net\", port=443): Max retries exceeded with url: /?resource=account (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed'))", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[1;31mConnectionError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32mc:\\Users\\dev\\Desktop\\api.fairhub.io\\notebooks\\azure-blob.ipynb Cell 2\u001b[0m line \u001b[0;36m6\n\u001b[0;32m 53\u001b[0m headers \u001b[39m=\u001b[39m {\n\u001b[0;32m 54\u001b[0m \u001b[39m'\u001b[39m\u001b[39mx-ms-date\u001b[39m\u001b[39m'\u001b[39m : request_time,\n\u001b[0;32m 55\u001b[0m \u001b[39m'\u001b[39m\u001b[39mx-ms-version\u001b[39m\u001b[39m'\u001b[39m : api_version,\n\u001b[0;32m 56\u001b[0m \u001b[39m'\u001b[39m\u001b[39mContent-Length\u001b[39m\u001b[39m'\u001b[39m: \u001b[39m\"\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[0;32m 57\u001b[0m \u001b[39m'\u001b[39m\u001b[39mAuthorization\u001b[39m\u001b[39m'\u001b[39m : (\u001b[39m'\u001b[39m\u001b[39mSharedKey \u001b[39m\u001b[39m'\u001b[39m \u001b[39m+\u001b[39m storage_account_name \u001b[39m+\u001b[39m \u001b[39m'\u001b[39m\u001b[39m:\u001b[39m\u001b[39m'\u001b[39m \u001b[39m+\u001b[39m signed_string)\n\u001b[0;32m 58\u001b[0m }\n\u001b[0;32m 60\u001b[0m url \u001b[39m=\u001b[39m (\u001b[39m'\u001b[39m\u001b[39mhttps://\u001b[39m\u001b[39m'\u001b[39m \u001b[39m+\u001b[39m storage_account_name 
\u001b[39m+\u001b[39m \u001b[39m'\u001b[39m\u001b[39m.blob.core.windows.net/?resource=account\u001b[39m\u001b[39m'\u001b[39m)\n\u001b[1;32m---> 62\u001b[0m response \u001b[39m=\u001b[39m requests\u001b[39m.\u001b[39;49mget(url, headers\u001b[39m=\u001b[39;49mheaders)\n\u001b[0;32m 63\u001b[0m \u001b[39mprint\u001b[39m(response\u001b[39m.\u001b[39mtext)\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\api.py:73\u001b[0m, in \u001b[0;36mget\u001b[1;34m(url, params, **kwargs)\u001b[0m\n\u001b[0;32m 62\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mget\u001b[39m(url, params\u001b[39m=\u001b[39m\u001b[39mNone\u001b[39;00m, \u001b[39m*\u001b[39m\u001b[39m*\u001b[39mkwargs):\n\u001b[0;32m 63\u001b[0m \u001b[39m \u001b[39m\u001b[39mr\u001b[39m\u001b[39m\"\"\"Sends a GET request.\u001b[39;00m\n\u001b[0;32m 64\u001b[0m \n\u001b[0;32m 65\u001b[0m \u001b[39m :param url: URL for the new :class:`Request` object.\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 70\u001b[0m \u001b[39m :rtype: requests.Response\u001b[39;00m\n\u001b[0;32m 71\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[1;32m---> 73\u001b[0m \u001b[39mreturn\u001b[39;00m request(\u001b[39m\"\u001b[39;49m\u001b[39mget\u001b[39;49m\u001b[39m\"\u001b[39;49m, url, params\u001b[39m=\u001b[39;49mparams, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\api.py:59\u001b[0m, in \u001b[0;36mrequest\u001b[1;34m(method, url, **kwargs)\u001b[0m\n\u001b[0;32m 55\u001b[0m \u001b[39m# By using the 'with' statement we are sure the session is closed, thus we\u001b[39;00m\n\u001b[0;32m 56\u001b[0m \u001b[39m# avoid leaving sockets open which can trigger a ResourceWarning in some\u001b[39;00m\n\u001b[0;32m 57\u001b[0m \u001b[39m# cases, and look like a memory leak in others.\u001b[39;00m\n\u001b[0;32m 58\u001b[0m \u001b[39mwith\u001b[39;00m 
sessions\u001b[39m.\u001b[39mSession() \u001b[39mas\u001b[39;00m session:\n\u001b[1;32m---> 59\u001b[0m \u001b[39mreturn\u001b[39;00m session\u001b[39m.\u001b[39;49mrequest(method\u001b[39m=\u001b[39;49mmethod, url\u001b[39m=\u001b[39;49murl, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\sessions.py:589\u001b[0m, in \u001b[0;36mSession.request\u001b[1;34m(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\u001b[0m\n\u001b[0;32m 584\u001b[0m send_kwargs \u001b[39m=\u001b[39m {\n\u001b[0;32m 585\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mtimeout\u001b[39m\u001b[39m\"\u001b[39m: timeout,\n\u001b[0;32m 586\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mallow_redirects\u001b[39m\u001b[39m\"\u001b[39m: allow_redirects,\n\u001b[0;32m 587\u001b[0m }\n\u001b[0;32m 588\u001b[0m send_kwargs\u001b[39m.\u001b[39mupdate(settings)\n\u001b[1;32m--> 589\u001b[0m resp \u001b[39m=\u001b[39m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49msend(prep, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49msend_kwargs)\n\u001b[0;32m 591\u001b[0m \u001b[39mreturn\u001b[39;00m resp\n", - "File \u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\sessions.py:703\u001b[0m, in \u001b[0;36mSession.send\u001b[1;34m(self, request, **kwargs)\u001b[0m\n\u001b[0;32m 700\u001b[0m start \u001b[39m=\u001b[39m preferred_clock()\n\u001b[0;32m 702\u001b[0m \u001b[39m# Send the request\u001b[39;00m\n\u001b[1;32m--> 703\u001b[0m r \u001b[39m=\u001b[39m adapter\u001b[39m.\u001b[39;49msend(request, \u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mkwargs)\n\u001b[0;32m 705\u001b[0m \u001b[39m# Total elapsed time of the request (approximately)\u001b[39;00m\n\u001b[0;32m 706\u001b[0m elapsed \u001b[39m=\u001b[39m preferred_clock() \u001b[39m-\u001b[39m start\n", - "File 
\u001b[1;32mc:\\Users\\dev\\anaconda3\\envs\\fairhub-api-dev-env\\lib\\site-packages\\requests\\adapters.py:519\u001b[0m, in \u001b[0;36mHTTPAdapter.send\u001b[1;34m(self, request, stream, timeout, verify, cert, proxies)\u001b[0m\n\u001b[0;32m 515\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39misinstance\u001b[39m(e\u001b[39m.\u001b[39mreason, _SSLError):\n\u001b[0;32m 516\u001b[0m \u001b[39m# This branch is for urllib3 v1.22 and later.\u001b[39;00m\n\u001b[0;32m 517\u001b[0m \u001b[39mraise\u001b[39;00m SSLError(e, request\u001b[39m=\u001b[39mrequest)\n\u001b[1;32m--> 519\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mConnectionError\u001b[39;00m(e, request\u001b[39m=\u001b[39mrequest)\n\u001b[0;32m 521\u001b[0m \u001b[39mexcept\u001b[39;00m ClosedPoolError \u001b[39mas\u001b[39;00m e:\n\u001b[0;32m 522\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mConnectionError\u001b[39;00m(e, request\u001b[39m=\u001b[39mrequest)\n", - "\u001b[1;31mConnectionError\u001b[0m: HTTPSConnectionPool(host=\"'b2aiuploadtest.blob.core.windows.net\", port=443): Max retries exceeded with url: /?resource=account (Caused by NewConnectionError(': Failed to establish a new connection: [Errno 11001] getaddrinfo failed'))" - ] - } - ], + "outputs": [], "source": [ "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", "storage_account_key = AZURE_ACCESS_KEY\n", "api_version = '2018-03-28'\n", "request_time = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')\n", "\n", + "\n", + "\n", "string_params = {\n", " 'verb': 'GET',\n", " 'Content-Encoding': '',\n", " 'Content-Language': '',\n", - " 'Content-Length': \"\",\n", + " 'Content-Length': '',\n", " 'Content-MD5': '',\n", " 'Content-Type': '',\n", " 'Date': '',\n", @@ -118,7 +89,7 @@ "headers = {\n", " 'x-ms-date' : request_time,\n", " 'x-ms-version' : api_version,\n", - " 'Content-Length': \"\",\n", + " # 'Content-Length': \"\",\n", " 'Authorization' : ('SharedKey ' + storage_account_name + ':' + signed_string)\n", "}\n", "\n", 
@@ -145,7 +116,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.17" + "version": "3.8.18" } }, "nbformat": 4, From 756c7c898432c01f2fcf3d3a0d0c861e4c95a971 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 30 Oct 2023 13:18:06 -0700 Subject: [PATCH 321/505] =?UTF-8?q?=F0=9F=9A=91=20fix:=20use=20a=20sas=20t?= =?UTF-8?q?oken?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- notebooks/azure-blob.ipynb | 55 +++++++++++++++++++++++++++++++------- 1 file changed, 46 insertions(+), 9 deletions(-) diff --git a/notebooks/azure-blob.ipynb b/notebooks/azure-blob.ipynb index 5a9750ff..18941500 100644 --- a/notebooks/azure-blob.ipynb +++ b/notebooks/azure-blob.ipynb @@ -13,7 +13,7 @@ "import base64\n", "import hmac\n", "import hashlib\n", - "\n", + "from pprint import pprint\n", "\n", "# Load environment variables from .env\n", "load_dotenv(\".env\")\n", @@ -25,7 +25,40 @@ "\n", "if not environ.get(\"AZURE_ACCESS_KEY\"):\n", " raise ValueError(\"AZURE_ACCESS_KEY is not set\")\n", - "AZURE_ACCESS_KEY = environ.get(\"AZURE_ACCESS_KEY\")" + "\n", + "AZURE_ACCESS_KEY = environ.get(\"AZURE_ACCESS_KEY\")\n", + "\n", + "if not environ.get(\"AZURE_SAS_TOKEN\"):\n", + " raise ValueError(\"AZURE_SAS_TOKEN is not set\")\n", + "\n", + "AZURE_SAS_TOKEN = environ.get(\"AZURE_SAS_TOKEN\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# connection via SAS token\n", + "\n", + "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", + "storage_account_sas_token = AZURE_SAS_TOKEN\n", + "api_version = \"2023-08-03\"\n", + "request_time = datetime.datetime.utcnow().strftime(\"%a, %d %b %Y %H:%M:%S GMT\")\n", + "\n", + "folderName = \"logging\"\n", + "\n", + "url = f\"https://{storage_account_name}.dfs.core.windows.net/{folderName}?recursive=true&resource=filesystem&{storage_account_sas_token}\"\n", + "\n", + "headers = 
{\n", + " \"x-ms-date\": request_time,\n", + " \"x-ms-version\": api_version,\n", + "}\n", + "\n", + "response = requests.get(url, headers=headers)\n", + "print(response.text)\n", + "# pprint(response.json())" ] }, { @@ -34,15 +67,16 @@ "metadata": {}, "outputs": [], "source": [ + "# connection via shared key\n", + "\n", "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", "storage_account_key = AZURE_ACCESS_KEY\n", - "api_version = '2018-03-28'\n", + "api_version = '2023-08-03'\n", "request_time = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')\n", "\n", "\n", - "\n", "string_params = {\n", - " 'verb': 'GET',\n", + " 'Verb': 'GET',\n", " 'Content-Encoding': '',\n", " 'Content-Language': '',\n", " 'Content-Length': '',\n", @@ -58,7 +92,7 @@ " 'CanonicalizedResource': '/' + storage_account_name + '/\\ncomp:properties\\nrestype:service'\n", "}\n", "\n", - "string_to_sign = (string_params['verb'] + '\\n'\n", + "string_to_sign = (string_params['Verb'] + '\\n'\n", " + string_params['Content-Encoding'] + '\\n'\n", " + string_params['Content-Language'] + '\\n'\n", " + string_params['Content-Length'] + '\\n'\n", @@ -90,13 +124,16 @@ " 'x-ms-date' : request_time,\n", " 'x-ms-version' : api_version,\n", " # 'Content-Length': \"\",\n", - " 'Authorization' : ('SharedKey ' + storage_account_name + ':' + signed_string)\n", + " 'Authorization' : f\"SharedKey {storage_account_name}:{signed_string}\"\n", "}\n", "\n", - "url = ('https://' + storage_account_name + '.blob.core.windows.net/?resource=account')\n", + "dns_suffix = 'dfs.core.windows.net'\n", + "folderName = 'logging'\n", + "\n", + "url = f'https://{storage_account_name}.{dns_suffix}/{folderName}?resource=filesystem'\n", "\n", "response = requests.get(url, headers=headers)\n", - "print(response.text)" + "pprint(response.json())" ] } ], From 1b2ec6d5adafb1e4c215f7e53b64ddb36b0ae87f Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 30 Oct 2023 22:08:22 -0700 Subject: [PATCH 322/505] wip: redcap api and 
supporting sql --- apis/__init__.py | 23 +- apis/authentication.py | 3 + apis/redcap.py | 327 ++++++++++-------- apis/redcap_data/__init__.py | 8 +- apis/redcap_data/redcap_project_data.py | 103 +++--- .../redcap_report_participant_values_data.py | 241 +++++++------ .../redcap_report_participants_data.py | 50 +-- .../redcap_report_repeat_surveys_data.py | 61 ++-- .../redcap_report_survey_completions_data.py | 280 ++++++++------- app.py | 5 +- model/__init__.py | 6 +- model/study.py | 13 +- model/study_metadata/study_redcap.py | 10 +- model/study_redcap.py | 70 ---- model/study_redcap_project_api.py | 66 ++++ model/study_redcap_project_dashboard.py | 75 ++++ sql/init.sql | 40 +++ sql/init_timezones.sql | 42 ++- sql/specific_tables.sql | 16 + 19 files changed, 872 insertions(+), 567 deletions(-) delete mode 100644 model/study_redcap.py create mode 100644 model/study_redcap_project_api.py create mode 100644 model/study_redcap_project_dashboard.py diff --git a/apis/__init__.py b/apis/__init__.py index df143d8a..eb7c2812 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -2,8 +2,8 @@ from flask_restx import Api, Resource from apis.dataset_metadata_namespace import api as dataset_metadata_namespace -from apis.study_metadata_namespace import api as study_metadata_namespace from apis.redcap_data_namespace import api as redcap_data_namespace +from apis.study_metadata_namespace import api as study_metadata_namespace from .authentication import api as authentication from .contributor import api as contributors_api @@ -24,6 +24,20 @@ from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_title import api as title from .participant import api as participants_api +from .redcap import api as redcap +from .redcap_data.redcap_project_data import api as redcap_project_data +from .redcap_data.redcap_report_participant_values_data import ( + api as redcap_report_participants_values_data, +) +from .redcap_data.redcap_report_participants_data 
import ( + api as redcap_report_participants_data, +) +from .redcap_data.redcap_report_repeat_surveys_data import ( + api as redcap_report_repeat_surveys_data, +) +from .redcap_data.redcap_report_survey_completions_data import ( + api as redcap_report_survey_completions_data, +) from .study import api as study_api from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd @@ -42,12 +56,6 @@ from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator from .study_metadata.study_status import api as status from .user import api as user -from .redcap import api as redcap -from .redcap_data.redcap_project_data import api as redcap_project_data -from .redcap_data.redcap_report_participants_data import api as redcap_report_participants_data -from .redcap_data.redcap_report_participant_values_data import api as redcap_report_participants_values_data -from .redcap_data.redcap_report_repeat_surveys_data import api as redcap_report_repeat_surveys_data -from .redcap_data.redcap_report_survey_completions_data import api as redcap_report_survey_completions_data api = Api( title="FAIRHUB", @@ -128,4 +136,3 @@ def get(self): api.add_namespace(contributors_api) api.add_namespace(user) api.add_namespace(redcap) - diff --git a/apis/authentication.py b/apis/authentication.py index ebf36b8d..f1badb0d 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -303,6 +303,7 @@ def is_granted(permission: str, study=None): "study_metadata", "dataset_metadata", "make_owner", + # "redcap_access", ], "admin": [ "admin", @@ -320,6 +321,7 @@ def is_granted(permission: str, study=None): "participant", "study_metadata", "dataset_metadata", + # "redcap_access", ], "editor": [ "editor", @@ -333,6 +335,7 @@ def is_granted(permission: str, study=None): "study_metadata", "version", "dataset_metadata", + # "redcap_access", ], "viewer": ["viewer", "view"], } diff --git a/apis/redcap.py b/apis/redcap.py index 
e3a45b88..b7d2449c 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -1,5 +1,5 @@ """API routes for study redcap""" -import typing +from typing import Any, Union from flask import request from flask_restx import Namespace, Resource, fields @@ -11,168 +11,221 @@ api = Namespace("Redcap", description="Redcap operations", path="/") -redcap_model = api.model( - "Redcap", +redcap_project_api_model = api.model( + "RedcapProjectAPI", { - "id": fields.String(required=True), - "redcap_api_token": fields.String(required=True), - "redcap_api_url": fields.String(required=True), - "redcap_project_id": fields.String(required=True), - "redcap_report_id_survey_completions": fields.String(required=True), - "redcap_report_id_repeat_surveys": fields.String(required=True), - "redcap_report_id_participant_values": fields.String(required=True), - "redcap_report_id_participants": fields.String(required=True), + "study_id": fields.String(required=True), + "project_title": fields.String(required=True), + "project_id": fields.String(required=True), + "project_api_token": fields.String(required=True), + "project_api_url": fields.String(required=True), }, ) +redcap_project_dashboard_model = api.model( + "RedcapProjectDashboard", + { + "project_id": fields.String(required=True), + "dashboard_id": fields.String( + required=True, readonly=True, description="REDCap dashboard ID" + ), + "dashboard_name": fields.String( + required=True, readonly=True, description="REDCap dashboard name" + ), + "report_ids": fields.String( + required=True, readonly=True, description="REDCap project report IDs" + ), + }, +) + + @api.route("/study//redcap") -class Redcap(Resource): +class RedcapProjectAPI(Resource): """Study Redcap Metadata""" - @api.doc("redcap") + @api.doc("redcap_project_api") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_model) - def get(self, study_id: int, redcap_project_id: str): + @api.marshal_with(redcap_project_api_model, as_list=True) + 
def get(self, study_id: int): """Get study redcap""" - study_ = model.Study.query.get(study_id) - study_redcap_ = study_.study_redcap - return study_redcap_.to_dict() + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + # redcap_project_apis = model.StudyRedcapProjectApi.query.all(study) + redcap_project_apis = model.StudyRedcapProjectApi.query.filter_by(study=study) + return [ + redcap_project_api.to_dict() for redcap_project_api in redcap_project_apis + ] + @api.doc("redcap_project_api") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_model) - def post(self, study_id: int): - """Update study redcap""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "required": [ - "redcap_api_token", - "redcap_api_url", - "redcap_project_id", - "redcap_report_id_survey_completions", - "redcap_report_id_repeat_surveys", - "redcap_report_id_participant_values", - "redcap_report_id_participants", - ], - "properties": { - "redcap_api_token": {"type": string, "minLength": 1}, - "redcap_api_url": {"type": string, "minLength": 1}, - "redcap_project_id": {"type": string, "minLength": 1}, - "redcap_report_id_participants": {"type": string, "minLength": 1}, - "redcap_report_id_survey_completions": {"type": string}, - "redcap_report_id_repeat_surveys": {"type": string}, - "redcap_report_id_participant_values": {"type": string}, - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[typing.Any, dict] = request.json - if len(data["redcap_api_url"]) < 1: - return ( - f"recap_api_url is required for redcap access: {data['redcap_api_url']}", - 400, - ) - if len(data["redcap_api_token"]) < 1: - return ( - f"recap_api_token is required for redcap access: {data['redcap_api_token']}", - 400, - ) - if len(data["redcap_project_id"]) < 1: - return ( - 
f"recap_project_id is required for redcap access: {data['redcap_project_id']}", - 400, - ) - - study_obj = model.Study.query.get(study_id) - if not is_granted("viewer", study_id): - return "Access denied, you can not modify", 403 + @api.marshal_with(redcap_project_api_model) + def put(self, study_id: int): study = model.Study.query.get(study_id) - study.study_redcap.update(request.json) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + data: Union[Any, dict] = request.json + update_redcap_project_api = model.StudyRedcapProjectApi.query.get( + data["project_id"] + ) + update_redcap_project_api.update(data) model.db.session.commit() + return update_redcap_project_api.to_dict() - return study.study_redcap.to_dict() - -@api.route("/study//redcap/") -class RedcapUpdate(Resource): - @api.doc("redcap") + @api.doc("redcap_project_api") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_model) - def delete(self, study_id: int, redcap_project_id: str): + @api.marshal_with(redcap_project_api_model) + def delete(self, study_id: int): """Delete study redcap metadata""" - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): return "Access denied, you can not delete study", 403 - study_redcap_ = model.StudyRedcap.query.get(study_id) - model.db.session.delete(study_redcap_) + data: Union[Any, dict] = request.json + redcap_project_api = model.StudyRedcapProjectApi.query.get(data["project_id"]) + model.db.session.delete(redcap_project_api) model.db.session.commit() return 204 + +@api.route("/study//redcap/add") +class AddRedcapProjectAPI(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_model) - def put(self, study_id: int): + @api.marshal_with(redcap_project_api_model) + def post(self, study_id: int): """Update study 
redcap""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "required": [ - "redcap_api_token", - "redcap_api_url", - "redcap_project_id", - "redcap_report_id_survey_completions", - "redcap_report_id_repeat_surveys", - "redcap_report_id_participant_values", - "redcap_report_id_participants", - ], - "properties": { - "redcap_api_token": {"type": string, "minLength": 1}, - "redcap_api_url": {"type": string, "minLength": 1}, - "redcap_project_id": {"type": string, "minLength": 1}, - "redcap_report_id_participants": {"type": string, "minLength": 1}, - "redcap_report_id_survey_completions": {"type": string}, - "redcap_report_id_repeat_surveys": {"type": string}, - "redcap_report_id_participant_values": {"type": string}, - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[typing.Any, dict] = request.json - if len(data["redcap_api_url"]) < 1: - return ( - f"recap_api_url is required for redcap access: {data['redcap_api_url']}", - 400, - ) - if len(data["redcap_api_token"]) < 1: - return ( - f"recap_api_token is required for redcap access: {data['redcap_api_token']}", - 400, - ) - if len(data["redcap_project_id"]) < 1: - return ( - f"recap_project_id is required for redcap access: {data['redcap_project_id']}", - 400, - ) - - study_obj = model.Study.query.get(study_id) - if not is_granted("viewer", study_id): - return "Access denied, you can not modify", 403 study = model.Study.query.get(study_id) - study.study_redcap.update(request.json) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + # Schema validation + data: Union[Any, dict] = request.json + # schema = { + # "type": "object", + # "additionalProperties": False, + # "required": [ + # "project_title", + # "project_id", + # "project_api_url", + # "project_api_token", + # ], + # "properties": { + # "project_title": {"type": "string", "minLength": 1}, + # 
"project_id": {"type": "string", "minLength": 5}, + # "project_api_url": {"type": "string", "minLength": 1}, + # "project_api_token": {"type": "string", "minLength": 32}, + # }, + # } + + # try: + # validate(request.json, schema) + # except ValidationError as e: + # return e.message, 400 + + # if len(data["project_title"]) < 1: + # return ( + # f"redcap project_title is required for redcap access: {data['project_title']}", + # 400, + # ) + # if len(data["redcap_project_id"]) < 1: + # return ( + # f"redcap project_id is required for redcap access: {data['project_id']}", + # 400, + # ) + # if len(data["redcap_api_url"]) < 1: + # return ( + # f"redcap project_api_url is required for redcap access: {data['project_api_url']}", + # 400, + # ) + # if len(data["project_api_token"]) < 1: + # return ( + # f"redcap project_api_token is required for redcap access: {data['project_api_token']}", + # 400, + # ) + print("data", data) + add_redcap_project_api = model.StudyRedcapProjectApi.from_data(study, data) + model.db.session.add(add_redcap_project_api) model.db.session.commit() - - return study.study_redcap.to_dict() - + print("redcap_project_api", add_redcap_project_api.to_dict()) + return add_redcap_project_api.to_dict(), 201 + + +# @api.route("/study//redcap/") +# class RedcapUpdate(Resource): +# @api.doc("redcap") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_api_model) +# def delete(self, study_id: int, redcap_project_id: str): +# """Delete study redcap metadata""" +# data: Union[Any, dict] = request.json +# if not is_granted("study_metadata", study_id): +# return "Access denied, you can not delete study", 403 +# redcap_project_api = model.StudyRedcapProjectApi.query.get(data["project_id"]) +# model.db.session.delete(redcap_project_api) +# model.db.session.commit() + +# return 204 + +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_model) +# def put(self, 
study_id: int): +# """Update study redcap""" +# # Schema validation +# schema = { +# "type": "object", +# "additionalProperties": False, +# "required": [ +# "redcap_api_token", +# "redcap_api_url", +# "redcap_project_id", +# "redcap_report_id_survey_completions", +# "redcap_report_id_repeat_surveys", +# "redcap_report_id_participant_values", +# "redcap_report_id_participants", +# ], +# "properties": { +# "redcap_api_token": {"type": string, "minLength": 1}, +# "redcap_api_url": {"type": string, "minLength": 1}, +# "redcap_project_id": {"type": string, "minLength": 1}, +# "redcap_report_id_participants": {"type": string, "minLength": 1}, +# "redcap_report_id_survey_completions": {"type": string}, +# "redcap_report_id_repeat_surveys": {"type": string}, +# "redcap_report_id_participant_values": {"type": string}, +# }, +# } + +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 + +# data: Union[Any, dict] = request.json +# if len(data["redcap_api_url"]) < 1: +# return ( +# f"recap_api_url is required for redcap access: {data['redcap_api_url']}", +# 400, +# ) +# if len(data["redcap_api_token"]) < 1: +# return ( +# f"recap_api_token is required for redcap access: {data['redcap_api_token']}", +# 400, +# ) +# if len(data["redcap_project_id"]) < 1: +# return ( +# f"recap_project_id is required for redcap access: {data['redcap_project_id']}", +# 400, +# ) + +# study_obj = model.Study.query.get(study_id) +# if not is_granted("viewer", study_id): +# return "Access denied, you can not modify", 403 +# study = model.Study.query.get(study_id) +# study.study_redcap.update(request.json) +# model.db.session.commit() + +# return study.study_redcap.to_dict() diff --git a/apis/redcap_data/__init__.py b/apis/redcap_data/__init__.py index 3f1960b0..4f12cfc0 100644 --- a/apis/redcap_data/__init__.py +++ b/apis/redcap_data/__init__.py @@ -1,5 +1,9 @@ from .redcap_project_data import RedcapProjectDataResource +from 
.redcap_report_participant_values_data import ( + RedcapReportParticipantValuesDataResource, +) from .redcap_report_participants_data import RedcapReportParticipantsDataResource -from .redcap_report_participant_values_data import RedcapReportParticipantValuesDataResource from .redcap_report_repeat_surveys_data import RedcapReportRepeatSurveysDataResource -from .redcap_report_survey_completions_data import RedcapReportSurveyCompletionsDataResource +from .redcap_report_survey_completions_data import ( + RedcapReportSurveyCompletionsDataResource, +) diff --git a/apis/redcap_data/redcap_project_data.py b/apis/redcap_data/redcap_project_data.py index 77ea86e5..f2cec382 100644 --- a/apis/redcap_data/redcap_project_data.py +++ b/apis/redcap_data/redcap_project_data.py @@ -22,46 +22,64 @@ # Import In-Memory Cache -redcap_project_data = api.model("RedcapProject", { - "project_id": fields.String(required=True, readonly=True, description=""), - "project_title": fields.String(required=True, readonly=True, description=""), - "creation_time": fields.String(required=True, readonly=True, description=""), - "production_time": fields.String(required=True, readonly=True, description=""), - "in_production": fields.Boolean(r =True, description=""), - "project_language": fields.String(required=True, readonly=True, description=""), - "purpose": fields.Integer(required=True, readonly=True, description=""), - "purpose_other": fields.Integer(required=True, readonly=True, description=""), - "project_notes": fields.String(required=True, readonly=True, description=""), - "custom_record_label": fields.String(required=True, readonly=True, description=""), - "secondary_unique_field": fields.String( - required=True, readonly=True, description="" - ), - "is_longitudinal": fields.Boolean(required=True, readonly=True, description=""), - "has_repeating_instruments_or_events": fields.Boolean( - required=True, readonly=True, description="" - ), - "surveys_enabled": fields.Boolean(required=True, 
readonly=True, description=""), - "scheduling_enabled": fields.Boolean(required=True, readonly=True, description=""), - "record_autonumbering_enabled": fields.Boolean( - required=True, readonly=True, description="" - ), - "randomization_enabled": fields.Boolean( - required=True, readonly=True, description="" - ), - "ddp_enabled": fields.Boolean(required=True, readonly=True, description=""), - "project_irb_number": fields.String(required=True, readonly=True, description=""), - "project_grant_number": fields.String(required=True, readonly=True, description=""), - "project_pi_firstname": fields.String(required=True, readonly=True, description=""), - "project_pi_lastname": fields.String(required=True, readonly=True, description=""), - "display_today_now_button": fields.Boolean( - required=True, readonly=True, description="" - ), - "missing_data_codes": fields.String(required=True, readonly=True, description=""), - "external_modules": fields.String(required=True, readonly=True, description=""), - "bypass_branching_erase_field_prompt": fields.Boolean( - required=True, readonly=True, description="" - ), -}) +redcap_project_data = api.model( + "RedcapProject", + { + "project_id": fields.String(required=True, readonly=True, description=""), + "project_title": fields.String(required=True, readonly=True, description=""), + "creation_time": fields.String(required=True, readonly=True, description=""), + "production_time": fields.String(required=True, readonly=True, description=""), + "in_production": fields.Boolean(r=True, description=""), + "project_language": fields.String(required=True, readonly=True, description=""), + "purpose": fields.Integer(required=True, readonly=True, description=""), + "purpose_other": fields.Integer(required=True, readonly=True, description=""), + "project_notes": fields.String(required=True, readonly=True, description=""), + "custom_record_label": fields.String( + required=True, readonly=True, description="" + ), + "secondary_unique_field": 
fields.String( + required=True, readonly=True, description="" + ), + "is_longitudinal": fields.Boolean(required=True, readonly=True, description=""), + "has_repeating_instruments_or_events": fields.Boolean( + required=True, readonly=True, description="" + ), + "surveys_enabled": fields.Boolean(required=True, readonly=True, description=""), + "scheduling_enabled": fields.Boolean( + required=True, readonly=True, description="" + ), + "record_autonumbering_enabled": fields.Boolean( + required=True, readonly=True, description="" + ), + "randomization_enabled": fields.Boolean( + required=True, readonly=True, description="" + ), + "ddp_enabled": fields.Boolean(required=True, readonly=True, description=""), + "project_irb_number": fields.String( + required=True, readonly=True, description="" + ), + "project_grant_number": fields.String( + required=True, readonly=True, description="" + ), + "project_pi_firstname": fields.String( + required=True, readonly=True, description="" + ), + "project_pi_lastname": fields.String( + required=True, readonly=True, description="" + ), + "display_today_now_button": fields.Boolean( + required=True, readonly=True, description="" + ), + "missing_data_codes": fields.String( + required=True, readonly=True, description="" + ), + "external_modules": fields.String(required=True, readonly=True, description=""), + "bypass_branching_erase_field_prompt": fields.Boolean( + required=True, readonly=True, description="" + ), + }, +) + @api.route("/study//redcap//project") class RedcapProjectDataResource(Resource): @@ -80,7 +98,8 @@ def get(self, study_id: int, redcap_project_id: str): """ study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) + PyCapProject = Project( + study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] + ) project = PyCapProject.export_project_info() return project - diff --git 
a/apis/redcap_data/redcap_report_participant_values_data.py b/apis/redcap_data/redcap_report_participant_values_data.py index aac486d3..a69cca2e 100644 --- a/apis/redcap_data/redcap_report_participant_values_data.py +++ b/apis/redcap_data/redcap_report_participant_values_data.py @@ -8,6 +8,8 @@ import model from apis.redcap_data_namespace import api +from ..authentication import is_granted + # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig # from modules.etl.config import sexGenderTransformConfig @@ -18,121 +20,130 @@ # # ETL Modules # from modules.etl import transforms -from ..authentication import is_granted # Import In-Memory Cache # from __main__ import IN_MEMORY_CACHE -redcap_report_participant_values_data = api.model("RedcapReportParticipantValuesData", { - "record_id": fields.String( - required=True, readonly=True, description="Participant record ID" - ), - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "siteid": fields.String(required=True, readonly=True, description="Site ID"), - "dm": fields.String( - required=True, readonly=True, description="Data approved for Fairhub.io" - ), - "siteid": fields.String(required=True, readonly=True, description="Site ID"), - "genderid": fields.String( - required=True, readonly=True, description="Gender identity" - ), - "scrsex": fields.String(required=True, readonly=True, description="Sex at birth"), - "race": fields.String(required=True, readonly=True, description="Race"), - "race2": fields.String( - required=True, readonly=True, description="Race further defined" - ), - "ethnic": fields.String(required=True, readonly=True, description="Ethnicity"), - "dvenvyn": fields.String( - required=True, readonly=True, description="Environmental sensor distributed" - ), - "dvenvstdat": fields.String( - required=True, - readonly=True, - description="Date of environmental sensor distribution", - ), - "dvenvcrcid": fields.String( 
- required=True, - readonly=True, - description="Was environmental sensor demonstrated?", - ), - "dvcgmyn": fields.String( - required=True, readonly=True, description="Continuous glucose monitor inserted" - ), - "dvcgmstdat": fields.String( - required=True, - readonly=True, - description="Date of continuous glucose monitor was inserted", - ), - "dvcgmvrfy": fields.String( - required=True, - readonly=True, - description="Continuous glucose monitor initialized and recording?", - ), - "dvamwyn": fields.String( - required=True, - readonly=True, - description="Was the Apple watch sent home with the participant?", - ), - "dvamwstdat": fields.String( - required=True, - readonly=True, - description="Date Apple watch was given to participant", - ), - "dvamwsn": fields.String( - required=True, readonly=True, description="Apple watch serial number" - ), - "dvrtmthd": fields.String( - required=True, readonly=True, description="Planned method of device return" - ), - "dvrtnyn": fields.String( - required=True, - readonly=True, - description="Was the participant given device return instructions and shipping materials?", - ), - "dvrtnship": fields.String( - required=True, readonly=True, description="Return shipping tracking number" - ), - "mhterm_dm1": fields.String( - required=True, readonly=True, description="Type I diabetes" - ), - "mhterm_dm2": fields.String( - required=True, readonly=True, description="Type II diabetes" - ), - "mhterm_predm": fields.String( - required=True, readonly=True, description="Pre-diabetes" - ), - "mh_dm_age": fields.String( - required=True, readonly=True, description="Age diagnosed with type II diabetes" - ), - "mh_a1c": fields.String( - required=True, readonly=True, description="Elevated A1C levels" - ), - "cmtrt_a1c": fields.String( - required=True, - readonly=True, - description="Taking pills to control A1C and blood glucose levels?", - ), - "cmtrt_insln": fields.String( - required=True, - readonly=True, - description="Injecting insulin to control 
blood glucose levels", - ), - "cmtrt_glcs": fields.String( - required=True, - readonly=True, - description="Using other injectables to control blood glucose levels", - ), - "cmtrt_lfst": fields.String( - required=True, - readonly=True, - description="Using lifestyle changes to control blood glucose levels", - ), - "scrcmpdat": fields.String( - required=True, readonly=True, description="Screening survey completion date" - ), -}) +redcap_report_participant_values_data = api.model( + "RedcapReportParticipantValuesData", + { + "record_id": fields.String( + required=True, readonly=True, description="Participant record ID" + ), + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "siteid": fields.String(required=True, readonly=True, description="Site ID"), + "dm": fields.String( + required=True, readonly=True, description="Data approved for Fairhub.io" + ), + "siteid": fields.String(required=True, readonly=True, description="Site ID"), + "genderid": fields.String( + required=True, readonly=True, description="Gender identity" + ), + "scrsex": fields.String( + required=True, readonly=True, description="Sex at birth" + ), + "race": fields.String(required=True, readonly=True, description="Race"), + "race2": fields.String( + required=True, readonly=True, description="Race further defined" + ), + "ethnic": fields.String(required=True, readonly=True, description="Ethnicity"), + "dvenvyn": fields.String( + required=True, readonly=True, description="Environmental sensor distributed" + ), + "dvenvstdat": fields.String( + required=True, + readonly=True, + description="Date of environmental sensor distribution", + ), + "dvenvcrcid": fields.String( + required=True, + readonly=True, + description="Was environmental sensor demonstrated?", + ), + "dvcgmyn": fields.String( + required=True, + readonly=True, + description="Continuous glucose monitor inserted", + ), + "dvcgmstdat": fields.String( + required=True, + readonly=True, + 
description="Date of continuous glucose monitor was inserted", + ), + "dvcgmvrfy": fields.String( + required=True, + readonly=True, + description="Continuous glucose monitor initialized and recording?", + ), + "dvamwyn": fields.String( + required=True, + readonly=True, + description="Was the Apple watch sent home with the participant?", + ), + "dvamwstdat": fields.String( + required=True, + readonly=True, + description="Date Apple watch was given to participant", + ), + "dvamwsn": fields.String( + required=True, readonly=True, description="Apple watch serial number" + ), + "dvrtmthd": fields.String( + required=True, readonly=True, description="Planned method of device return" + ), + "dvrtnyn": fields.String( + required=True, + readonly=True, + description="Was the participant given device return instructions and shipping materials?", + ), + "dvrtnship": fields.String( + required=True, readonly=True, description="Return shipping tracking number" + ), + "mhterm_dm1": fields.String( + required=True, readonly=True, description="Type I diabetes" + ), + "mhterm_dm2": fields.String( + required=True, readonly=True, description="Type II diabetes" + ), + "mhterm_predm": fields.String( + required=True, readonly=True, description="Pre-diabetes" + ), + "mh_dm_age": fields.String( + required=True, + readonly=True, + description="Age diagnosed with type II diabetes", + ), + "mh_a1c": fields.String( + required=True, readonly=True, description="Elevated A1C levels" + ), + "cmtrt_a1c": fields.String( + required=True, + readonly=True, + description="Taking pills to control A1C and blood glucose levels?", + ), + "cmtrt_insln": fields.String( + required=True, + readonly=True, + description="Injecting insulin to control blood glucose levels", + ), + "cmtrt_glcs": fields.String( + required=True, + readonly=True, + description="Using other injectables to control blood glucose levels", + ), + "cmtrt_lfst": fields.String( + required=True, + readonly=True, + description="Using lifestyle 
changes to control blood glucose levels", + ), + "scrcmpdat": fields.String( + required=True, readonly=True, description="Screening survey completion date" + ), + }, +) + @api.route("/study//redcap//participant-values") class RedcapReportParticipantValuesDataResource(Resource): @@ -144,6 +155,10 @@ class RedcapReportParticipantValuesDataResource(Resource): def get(self, study_id: int, redcap_project_id: str): study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) - participant_values = PyCapProject.export_report(study_redcap_["redcap_report_id_participant_values"]) + PyCapProject = Project( + study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] + ) + participant_values = PyCapProject.export_report( + study_redcap_["redcap_report_id_participant_values"] + ) return participant_values diff --git a/apis/redcap_data/redcap_report_participants_data.py b/apis/redcap_data/redcap_report_participants_data.py index 7dfa2aa7..eed60162 100644 --- a/apis/redcap_data/redcap_report_participants_data.py +++ b/apis/redcap_data/redcap_report_participants_data.py @@ -8,6 +8,8 @@ import model from apis.redcap_data_namespace import api +from ..authentication import is_granted + # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig # from modules.etl.config import sexGenderTransformConfig @@ -18,28 +20,31 @@ # # ETL Modules # from modules.etl import transforms -from ..authentication import is_granted # Import In-Memory Cache -redcap_report_participants_data = api.model("RedcapReportParticipantsData", { - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "dm_inperson_data_validated": fields.Integer( - required=True, - readonly=True, - attribute="dm___i", - description="All data collected and validated through in-person visit", - ), - 
"dm_device_data_validated": fields.Integer( - required=True, - readonly=True, - attribute="dm___d", - description="All device data entered and validated", - ), -}) +redcap_report_participants_data = api.model( + "RedcapReportParticipantsData", + { + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "dm_inperson_data_validated": fields.Integer( + required=True, + readonly=True, + attribute="dm___i", + description="All data collected and validated through in-person visit", + ), + "dm_device_data_validated": fields.Integer( + required=True, + readonly=True, + attribute="dm___d", + description="All device data entered and validated", + ), + }, +) + @api.route("/study//redcap//participants") class RedcapReportParticipantsDataResource(Resource): @@ -51,7 +56,10 @@ class RedcapReportParticipantsDataResource(Resource): def get(self, study_id: int, redcap_project_id: str): study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) - participants = PyCapProject.export_report(study_redcap_["redcap_report_id_participants"]) + PyCapProject = Project( + study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] + ) + participants = PyCapProject.export_report( + study_redcap_["redcap_report_id_participants"] + ) return participants - diff --git a/apis/redcap_data/redcap_report_repeat_surveys_data.py b/apis/redcap_data/redcap_report_repeat_surveys_data.py index ff706bc0..79bd9c29 100644 --- a/apis/redcap_data/redcap_report_repeat_surveys_data.py +++ b/apis/redcap_data/redcap_report_repeat_surveys_data.py @@ -8,6 +8,8 @@ import model from apis.redcap_data_namespace import api +from ..authentication import is_granted + # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig # from modules.etl.config import sexGenderTransformConfig @@ -18,34 +20,37 @@ # # ETL 
Modules # from modules.etl import transforms -from ..authentication import is_granted # Import In-Memory Cache # from __main__ import IN_MEMORY_CACHE -redcap_report_repeat_surveys_data = api.model("RedcapReportRepeatSurveysData", { - "record_id": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "current_medications_complete": fields.String( - required=True, - readonly=True, - description="All data collected and validated through in-person visit", - ), - "redcap_repeat_instrument": fields.String( - required=True, - readonly=True, - description="All device data entered and validated", - ), - "redcap_repeat_instance": fields.String( - required=True, - readonly=True, - description="All device data entered and validated", - ), -}) +redcap_report_repeat_surveys_data = api.model( + "RedcapReportRepeatSurveysData", + { + "record_id": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "current_medications_complete": fields.String( + required=True, + readonly=True, + description="All data collected and validated through in-person visit", + ), + "redcap_repeat_instrument": fields.String( + required=True, + readonly=True, + description="All device data entered and validated", + ), + "redcap_repeat_instance": fields.String( + required=True, + readonly=True, + description="All device data entered and validated", + ), + }, +) + @api.route("/study//redcap//repeat-surveys") class RedcapReportRepeatSurveysDataResource(Resource): @@ -57,6 +62,10 @@ class RedcapReportRepeatSurveysDataResource(Resource): def get(self, study_id: int, redcap_project_id: str): study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project(study_redcap_["redcap_api_url"], 
study_redcap_["redcap_api_token"]) - repeat_surveys = PyCapProject.export_report(study_redcap_["redcap_report_id_repeat_surveys"]) + PyCapProject = Project( + study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] + ) + repeat_surveys = PyCapProject.export_report( + study_redcap_["redcap_report_id_repeat_surveys"] + ) return repeat_surveys diff --git a/apis/redcap_data/redcap_report_survey_completions_data.py b/apis/redcap_data/redcap_report_survey_completions_data.py index df8a98ba..318ba229 100644 --- a/apis/redcap_data/redcap_report_survey_completions_data.py +++ b/apis/redcap_data/redcap_report_survey_completions_data.py @@ -8,6 +8,8 @@ import model from apis.redcap_data_namespace import api +from ..authentication import is_granted + # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig # from modules.etl.config import sexGenderTransformConfig @@ -18,142 +20,148 @@ # # ETL Modules # from modules.etl import transforms -from ..authentication import is_granted # Import In-Memory Cache # from __main__ import IN_MEMORY_CACHE -redcap_report_survey_completions_data = api.model("RedcapReportSurveyCompletionsData", { - "record_id": fields.String( - required=True, readonly=True, description="Participant record ID" - ), - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "screening_survey_complete": fields.String( - required=True, readonly=True, description="Screening survey completed" - ), - "study_enrollment_complete": fields.String( - required=True, readonly=True, description="Study enrollment completed" - ), - "recruitment_survey_complete": fields.String( - required=True, readonly=True, description="Recruitment survey completed" - ), - "faq_survey_complete": fields.String( - required=True, readonly=True, description="FAQ survey completed" - ), - "recruitment_survey_management_complete": fields.String( - required=True, - readonly=True, - 
description="Recruitment survey management completed", - ), - "device_distribution_complete": fields.String( - required=True, readonly=True, description="Device distribution completed" - ), - "preconsent_survey_complete": fields.String( - required=True, readonly=True, description="Pre-consent survey completed" - ), - "consent_survey_complete": fields.String( - required=True, readonly=True, description="Consent survey completed" - ), - "staff_consent_attestation_survey_complete": fields.String( - required=True, - readonly=True, - description="Staff consent attestation survey completed", - ), - "demographics_survey_complete": fields.String( - required=True, readonly=True, description="Demographics survey completed" - ), - "health_survey_complete": fields.String( - required=True, readonly=True, description="Health survey completed" - ), - "substance_use_survey_complete": fields.String( - required=True, readonly=True, description="Substance use survey completed" - ), - "cesd10_survey_complete": fields.String( - required=True, readonly=True, description="CES-D-10 survey completed" - ), - "paid5_dm_survey_complete": fields.String( - required=True, readonly=True, description="PAID-5 DM survey completed" - ), - "diabetes_survey_complete": fields.String( - required=True, readonly=True, description="Diabetes survey completed" - ), - "dietary_survey_complete": fields.String( - required=True, readonly=True, description="Dietary survey completed" - ), - "ophthalmic_survey_complete": fields.String( - required=True, readonly=True, description="Opthalmic survey completed" - ), - "px_sdoh_combined_survey_complete": fields.String( - required=True, readonly=True, description="PhenX SDOH survey completed" - ), - "px_food_insecurity_survey_complete": fields.String( - required=True, - readonly=True, - description="PhenX Food Insecurity survey completed", - ), - "px_neighborhood_environment_survey_complete": fields.String( - required=True, - readonly=True, - description="PhenX 
Neighborhood Enviroment survey completed", - ), - "px_racial_ethnic_discrimination_survey_complete": fields.String( - required=True, - readonly=True, - description="PhenX Racial/Ethnic Discrimination survey completed", - ), - "decline_participation_survey_complete": fields.String( - required=True, - readonly=True, - description="Decline participation survey completed", - ), - "meds_assessment_complete": fields.String( - required=True, readonly=True, description="Medications assessment completed" - ), - "driving_record_complete": fields.String( - required=True, readonly=True, description="Driving record completed" - ), - "physical_assessment_complete": fields.String( - required=True, readonly=True, description="Physical assessment completed" - ), - "bcva_complete": fields.String( - required=True, readonly=True, description="BCVA completed" - ), - "photopic_mars_complete": fields.String( - required=True, readonly=True, description="Photopic mars completed" - ), - "mesopic_mars_complete": fields.String( - required=True, readonly=True, description="Mesopic mars completed" - ), - "monofilament_complete": fields.String( - required=True, readonly=True, description="Monofilament completed" - ), - "moca_complete": fields.String( - required=True, readonly=True, description="MOCA instrument completed" - ), - "ecg_complete": fields.String( - required=True, readonly=True, description="ECG completed" - ), - "retinal_imaging_v2_complete": fields.String( - required=True, readonly=True, description="Retinal imaging completed" - ), - "lab_results_complete": fields.String( - required=True, readonly=True, description="Lab results completed" - ), - "device_return_complete": fields.String( - required=True, readonly=True, description="Device return completed" - ), - "specimen_management_complete": fields.String( - required=True, readonly=True, description="Specimen management completed" - ), - "disposition_complete": fields.String( - required=True, readonly=True, description="Participant 
disposition completed" - ), - "data_management_complete": fields.String( - required=True, readonly=True, description="Fairhub.io data management completed" - ), -}) +redcap_report_survey_completions_data = api.model( + "RedcapReportSurveyCompletionsData", + { + "record_id": fields.String( + required=True, readonly=True, description="Participant record ID" + ), + "studyid": fields.String( + required=True, readonly=True, description="Study participant ID" + ), + "screening_survey_complete": fields.String( + required=True, readonly=True, description="Screening survey completed" + ), + "study_enrollment_complete": fields.String( + required=True, readonly=True, description="Study enrollment completed" + ), + "recruitment_survey_complete": fields.String( + required=True, readonly=True, description="Recruitment survey completed" + ), + "faq_survey_complete": fields.String( + required=True, readonly=True, description="FAQ survey completed" + ), + "recruitment_survey_management_complete": fields.String( + required=True, + readonly=True, + description="Recruitment survey management completed", + ), + "device_distribution_complete": fields.String( + required=True, readonly=True, description="Device distribution completed" + ), + "preconsent_survey_complete": fields.String( + required=True, readonly=True, description="Pre-consent survey completed" + ), + "consent_survey_complete": fields.String( + required=True, readonly=True, description="Consent survey completed" + ), + "staff_consent_attestation_survey_complete": fields.String( + required=True, + readonly=True, + description="Staff consent attestation survey completed", + ), + "demographics_survey_complete": fields.String( + required=True, readonly=True, description="Demographics survey completed" + ), + "health_survey_complete": fields.String( + required=True, readonly=True, description="Health survey completed" + ), + "substance_use_survey_complete": fields.String( + required=True, readonly=True, description="Substance 
use survey completed" + ), + "cesd10_survey_complete": fields.String( + required=True, readonly=True, description="CES-D-10 survey completed" + ), + "paid5_dm_survey_complete": fields.String( + required=True, readonly=True, description="PAID-5 DM survey completed" + ), + "diabetes_survey_complete": fields.String( + required=True, readonly=True, description="Diabetes survey completed" + ), + "dietary_survey_complete": fields.String( + required=True, readonly=True, description="Dietary survey completed" + ), + "ophthalmic_survey_complete": fields.String( + required=True, readonly=True, description="Opthalmic survey completed" + ), + "px_sdoh_combined_survey_complete": fields.String( + required=True, readonly=True, description="PhenX SDOH survey completed" + ), + "px_food_insecurity_survey_complete": fields.String( + required=True, + readonly=True, + description="PhenX Food Insecurity survey completed", + ), + "px_neighborhood_environment_survey_complete": fields.String( + required=True, + readonly=True, + description="PhenX Neighborhood Enviroment survey completed", + ), + "px_racial_ethnic_discrimination_survey_complete": fields.String( + required=True, + readonly=True, + description="PhenX Racial/Ethnic Discrimination survey completed", + ), + "decline_participation_survey_complete": fields.String( + required=True, + readonly=True, + description="Decline participation survey completed", + ), + "meds_assessment_complete": fields.String( + required=True, readonly=True, description="Medications assessment completed" + ), + "driving_record_complete": fields.String( + required=True, readonly=True, description="Driving record completed" + ), + "physical_assessment_complete": fields.String( + required=True, readonly=True, description="Physical assessment completed" + ), + "bcva_complete": fields.String( + required=True, readonly=True, description="BCVA completed" + ), + "photopic_mars_complete": fields.String( + required=True, readonly=True, description="Photopic mars 
completed" + ), + "mesopic_mars_complete": fields.String( + required=True, readonly=True, description="Mesopic mars completed" + ), + "monofilament_complete": fields.String( + required=True, readonly=True, description="Monofilament completed" + ), + "moca_complete": fields.String( + required=True, readonly=True, description="MOCA instrument completed" + ), + "ecg_complete": fields.String( + required=True, readonly=True, description="ECG completed" + ), + "retinal_imaging_v2_complete": fields.String( + required=True, readonly=True, description="Retinal imaging completed" + ), + "lab_results_complete": fields.String( + required=True, readonly=True, description="Lab results completed" + ), + "device_return_complete": fields.String( + required=True, readonly=True, description="Device return completed" + ), + "specimen_management_complete": fields.String( + required=True, readonly=True, description="Specimen management completed" + ), + "disposition_complete": fields.String( + required=True, + readonly=True, + description="Participant disposition completed", + ), + "data_management_complete": fields.String( + required=True, + readonly=True, + description="Fairhub.io data management completed", + ), + }, +) @api.route("/study//redcap//survey-completions") @@ -166,6 +174,10 @@ class RedcapReportSurveyCompletionsDataResource(Resource): def get(self, study_id: int, redcap_project_id: str): study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project(study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"]) - survey_completions = PyCapProject.export_report(study_redcap_["redcap_report_id_survey_completions"]) + PyCapProject = Project( + study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] + ) + survey_completions = PyCapProject.export_report( + study_redcap_["redcap_report_id_survey_completions"] + ) return survey_completions diff --git a/app.py b/app.py index 3d038ed5..fcacb756 100644 --- a/app.py 
+++ b/app.py @@ -8,22 +8,23 @@ import jwt from flask import Flask, request from flask_bcrypt import Bcrypt -from flask_cors import CORS from flask_caching import Cache +from flask_cors import CORS from sqlalchemy import MetaData import config import model import modules -from caching import create_cache from apis import api from apis.authentication import UnauthenticatedException, authentication, authorization from apis.exception import ValidationException +from caching import create_cache # from pyfairdatatools import __version__ bcrypt = Bcrypt() + def create_app(config_module=None): """Initialize the core application.""" # create and configure the app diff --git a/model/__init__.py b/model/__init__.py index 7d3845c5..f87180d1 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -48,12 +48,13 @@ from .study_metadata.study_reference import StudyReference from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from .study_metadata.study_status import StudyStatus +from .study_redcap_project_api import StudyRedcapProjectApi +from .study_redcap_project_dashboard import StudyRedcapProjectDashboard from .token_blacklist import TokenBlacklist from .user import User from .user_details import UserDetails from .version import Version from .version_readme import VersionReadme -from .study_redcap import StudyRedcap __all__ = [ "Study", @@ -95,6 +96,8 @@ "StudyLocation", "StudyOther", "StudyOverallOfficial", + "StudyRedcapProjectApi", + "StudyRedcapProjectDashboard", "StudyReference", "StudySponsorsCollaborators", "StudyStatus", @@ -107,5 +110,4 @@ "UserDetails", "Notification", "VersionReadme", - "StudyRedcap", ] diff --git a/model/study.py b/model/study.py index 7ffea516..09e937b3 100644 --- a/model/study.py +++ b/model/study.py @@ -62,7 +62,6 @@ def __init__(self): lazy="dynamic", cascade="all, delete", ) - study_arm = db.relationship( "StudyArm", back_populates="study", @@ -101,11 +100,13 @@ def __init__(self): back_populates="study", 
cascade="all, delete", ) - #NOTE: This has not been tested yet - study_redcap = db.relationship( - "StudyRedcap", - back_populates = "study", - cascade="all, delete" + # NOTE: Has not been tested + study_redcap_project_apis = db.relationship( + "StudyRedcapProjectApi", back_populates="study", cascade="all, delete" + ) + # NOTE: Has not been tested + study_redcap_project_dashboards = db.relationship( + "StudyRedcapProjectDashboard", back_populates="study", cascade="all, delete" ) study_intervention = db.relationship( "StudyIntervention", diff --git a/model/study_metadata/study_redcap.py b/model/study_metadata/study_redcap.py index 6a634345..cff9e2a8 100644 --- a/model/study_metadata/study_redcap.py +++ b/model/study_metadata/study_redcap.py @@ -43,7 +43,7 @@ def to_dict(self): "redcap_report_id_survey_completions": self.redcap_report_id_survey_completions, "redcap_report_id_repeat_surveys": self.redcap_report_id_repeat_surveys, "redcap_report_id_participant_values": self.redcap_report_id_participant_values, - "redcap_report_id_participants": self.redcap_report_id_participants + "redcap_report_id_participants": self.redcap_report_id_participants, } @staticmethod @@ -59,9 +59,13 @@ def update(self, data: dict): self.redcap_api_token = data["redcap_api_token"] self.redcap_api_url = data["redcap_api_url"] self.redcap_project_id = data["redcap_project_id"] - self.redcap_report_id_survey_completions = data["redcap_report_id_survey_completions"] + self.redcap_report_id_survey_completions = data[ + "redcap_report_id_survey_completions" + ] self.redcap_report_id_repeat_surveys = data["redcap_report_id_repeat_surveys"] - self.redcap_report_id_participant_values = data["redcap_report_id_participant_values"] + self.redcap_report_id_participant_values = data[ + "redcap_report_id_participant_values" + ] self.redcap_report_id_participants = data["redcap_report_id_participants"] self.study.touch() diff --git a/model/study_redcap.py b/model/study_redcap.py deleted file mode 100644 
index 9488bf56..00000000 --- a/model/study_redcap.py +++ /dev/null @@ -1,70 +0,0 @@ -from model import Study - -from .db import db - -class StudyRedcap(db.Model): # type: ignore - """A study is a collection of datasets and participants""" - - def __init__(self, study): - self.study = study - self.redcap_api_token = None - self.redcap_api_url = None - self.redcap_project_id = None - self.redcap_report_id_survey_completions = None - self.redcap_report_id_repeat_surveys = None - self.redcap_report_id_participant_values = None - self.redcap_report_id_participants = None - - __tablename__ = "study_redcap" - - redcap_api_token = db.Column(db.String, nullable=True) - redcap_api_url = db.Column(db.String, nullable=True) - redcap_project_id = db.Column(db.String, nullable=True) - redcap_report_id_survey_completions = db.Column(db.String, nullable=True) - redcap_report_id_repeat_surveys = db.Column(db.String, nullable=True) - redcap_report_id_participant_values = db.Column(db.String, nullable=True) - redcap_report_id_participants = db.Column(db.String, nullable=True) - - study_id = db.Column( - db.CHAR(36), - db.ForeignKey("study.id", ondelete="CASCADE"), - primary_key=True, - nullable=False, - ) - study = db.relationship("Study", back_populates="study_redcap") - - def to_dict(self): - """Converts the study to a dictionary""" - return { - "redcap_api_token": self.redcap_api_token, - "redcap_api_url": self.redcap_api_url, - "redcap_project_id": self.redcap_project_id, - "redcap_report_id_survey_completions": self.redcap_report_id_survey_completions, - "redcap_report_id_repeat_surveys": self.redcap_report_id_repeat_surveys, - "redcap_report_id_participant_values": self.redcap_report_id_participant_values, - "redcap_report_id_participants": self.redcap_report_id_participants - } - - @staticmethod - def from_data(study: Study, data: dict): - """Creates a new study from a dictionary""" - study_redcap = StudyRedcap(study) - study_redcap.update(data) - - return study_redcap - - def 
update(self, data: dict): - """Updates the study from a dictionary""" - self.redcap_api_token = data["redcap_api_token"] - self.redcap_api_url = data["redcap_api_url"] - self.redcap_project_id = data["redcap_project_id"] - self.redcap_report_id_survey_completions = data["redcap_report_id_survey_completions"] - self.redcap_report_id_repeat_surveys = data["redcap_report_id_repeat_surveys"] - self.redcap_report_id_participant_values = data["redcap_report_id_participant_values"] - self.redcap_report_id_participants = data["redcap_report_id_participants"] - self.study.touch() - - def validate(self): - """Validates the study""" - violations: list = [] - return violations diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py new file mode 100644 index 00000000..203b70aa --- /dev/null +++ b/model/study_redcap_project_api.py @@ -0,0 +1,66 @@ +import uuid +from datetime import datetime, timezone + +from model import Study + +from .db import db +from .study import Study + + +class StudyRedcapProjectApi(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.study = study + self.id = str(uuid.uuid4()) + self.created_at = datetime.now(timezone.utc).timestamp() + + __tablename__ = "study_redcap_project_api" + project_id = db.Column(db.CHAR(5), primary_key=True) + project_title = db.Column(db.String, nullable=False) + project_api_url = db.Column(db.String, nullable=False) + project_api_key = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + updated_on = db.Column(db.BigInteger, nullable=False) + + study_id = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + nullable=False, + ) + study = db.relationship( + "Study", back_populates="study_redcap_project_apis", cascade="all, delete" + ) + study_redcap_project_dashboards = db.relationship( + "StudyRedcapProjectDashboard", back_populates="study_redcap_project_api" + ) 
+ + def to_dict(self): + """Converts the study to a dictionary""" + return { + "study_id": self.study.id, + "project_title": self.project_title, + "project_id": self.project_id, + "project_api_url": self.project_api_url, + "project_api_key": self.project_api_key, + } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_redcap_project_api = StudyRedcapProjectApi(study) + study_redcap_project_api.update(data) + return study_redcap_project_api + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.project_title = data["project_title"] + self.project_id = data["project_id"] + self.project_api_url = data["project_api_url"] + self.project_api_key = data["project_api_key"] + self.updated_on = datetime.now(timezone.utc).timestamp() + + def validate(self): + """Validates the study""" + violations: list = [] + return violations diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py new file mode 100644 index 00000000..22565f38 --- /dev/null +++ b/model/study_redcap_project_dashboard.py @@ -0,0 +1,75 @@ +import uuid +from datetime import datetime, timezone + +from sqlalchemy import String +from sqlalchemy.dialects.postgresql import ARRAY + +from model import Study, StudyRedcapProjectApi + +from .db import db +from .study import Study + + +class StudyRedcapProjectDashboard(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.study = study + self.id = str(uuid.uuid4()) + self.created_at = datetime.now(timezone.utc).timestamp() + + __tablename__ = "study_redcap_project_dashboard" + dashboard_id = db.Column(db.CHAR(36), primary_key=True) + dashboard_name = db.Column(db.String, nullable=False) + dashboard_modules = db.Column(ARRAY(String), nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + updated_on = db.Column(db.BigInteger, nullable=False) + + study_id = 
db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + nullable=False, + ) + project_id = db.Column( + db.CHAR(5), + db.ForeignKey("study_redcap_project_api.project_id", ondelete="CASCADE"), + nullable=False, + ) + study = db.relationship( + "Study", back_populates="study_redcap_project_dashboards", cascade="all, delete" + ) + study_redcap_project_api = db.relationship( + "StudyRedcapProjectApi", + back_populates="study_redcap_project_dashboards", + cascade="all, delete", + ) + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "project_id": self.project_id, + "dashboard_id": self.dashboard_id, + "dashboard_name": self.dashboard_name, + "dashboard_endpoint": self.dashboard_endpoint, + "created_at": self.created_at, + "updated_on": self.updated_on, + } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_redcap_project_dashboard = StudyRedcapProjectDashboard(study) + study_redcap_project_dashboard.update(data) + return study_redcap_project_dashboard + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.dashboard_id = data["dashboard_id"] + self.dashboard_name = data["dashboard_name"] + self.dashboard_endpoint = data["dashboard_endpoint"] + self.updated_on = datetime.now(timezone.utc).timestamp() + + def validate(self): + """Validates the study""" + violations: list = [] + return violations diff --git a/sql/init.sql b/sql/init.sql index bd8d6c79..5ed11a48 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -931,6 +931,46 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; +-- Dumping structure for table public.study_redcap_project_api +CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( + "study_id" CHAR(36) NOT NULL, + "project_id" BIGINT NOT NULL, + 
"project_title" VARCHAR NOT NULL, + "project_api_url" VARCHAR NOT NULL, + "project_api_key" CHAR(32) NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("study_id", "project_id"), + CONSTRAINT "study_redcap_project_api_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_redcap_project_api: 1 rows +/*!40000 ALTER TABLE "study_redcap_project_api" DISABLE KEYS */; +INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_redcap_project_api" ENABLE KEYS */; + +-- Dumping structure for table public.study_redcap_project_dashboard +CREATE TABLE IF NOT EXISTS "study_redcap_project_dashboard" ( + "study_id" CHAR(36) NOT NULL, + "project_id" BIGINT NOT NULL, + "dashboard_id" CHAR(36) NOT NULL, + "dashboard_name" VARCHAR NOT NULL, + "dashboard_modules" UNKNOWN NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("study_id", "project_id", "dashboard_id"), + CONSTRAINT "study_redcap_project_dashboard_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_redcap_project_dashboard_project_id_fkey" FOREIGN KEY ("project_id") REFERENCES "study_redcap_project_api" ("project_id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_redcap_project_dashboard: 1 rows +/*!40000 ALTER TABLE "study_redcap_project_dashboard" DISABLE KEYS */; +INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES + 
('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_redcap_project_dashboard" ENABLE KEYS */; + + /*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 9edd316b..a8a8b5a2 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -1046,10 +1046,50 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; +-- Dumping structure for table public.study_redcap_project_api +CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( + "study_id" CHAR(36) NOT NULL, + "project_id" BIGINT NOT NULL, + "project_title" VARCHAR NOT NULL, + "project_api_url" VARCHAR NOT NULL, + "project_api_key" CHAR(32) NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("study_id", "project_id"), + CONSTRAINT "study_redcap_project_api_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_redcap_project_api: 1 rows +/*!40000 ALTER TABLE "study_redcap_project_api" DISABLE KEYS */; +INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_redcap_project_api" ENABLE KEYS */; + +-- Dumping structure for table public.study_redcap_project_dashboard +CREATE TABLE 
IF NOT EXISTS "study_redcap_project_dashboard" ( + "study_id" CHAR(36) NOT NULL, + "project_id" BIGINT NOT NULL, + "dashboard_id" CHAR(36) NOT NULL, + "dashboard_name" VARCHAR NOT NULL, + "dashboard_modules" VARCHAR[] NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("study_id", "project_id", "dashboard_id"), + CONSTRAINT "study_redcap_project_dashboard_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_redcap_project_dashboard_project_id_fkey" FOREIGN KEY ("project_id") REFERENCES "study_redcap_project_api" ("project_id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_redcap_project_dashboard: 1 rows +/*!40000 ALTER TABLE "study_redcap_project_dashboard" DISABLE KEYS */; +INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_redcap_project_dashboard" ENABLE KEYS */; + + /*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40111 SET SQL_NOTES=IFNULL(@OLD_SQL_NOTES, 1) */; -COMMIT; \ No newline at end of file +COMMIT; diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index a510f187..60a36289 100644 --- a/sql/specific_tables.sql +++ b/sql/specific_tables.sql @@ -62,6 +62,22 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), 
('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); + +INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "updated_on", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', '11111', 'ai-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA1', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000002', '22222', 'dev-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA2', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '33333', 'ops-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA3', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '44444', 'data-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000005', '55555', 'more-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', '2023-08-13 16:23:48', '2023-08-13 16:23:49'); + +INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', '11111', '10000000-0000-0000-0000-000000000000', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000002', '22222', '20000000-0000-0000-0000-000000000000', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '33333', '30000000-0000-0000-0000-000000000000', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '44444', '40000000-0000-0000-0000-000000000000', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + 
('00000000-0000-0000-0000-000000000005', '55555', '50000000-0000-0000-0000-000000000000', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); + +/*!40000 ALTER TABLE "dataset" ENABLE KEYS */; COMMIT; From d315c6458a304ddb814a675b7dcf4ed1cdc3252f Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 31 Oct 2023 17:00:21 -0700 Subject: [PATCH 323/505] feat: add api endpoint to request files from Azure --- .flake8 | 2 +- .markdownlint.json | 1 + apis/__init__.py | 6 +- apis/file.py | 111 +++++++++++++++++++++++++++++++++++++ apis/study.py | 27 +++++++-- config.py | 3 + dev/image.png | Bin 0 -> 52415 bytes dev/sas_token.md | 3 + notebooks/azure-blob.ipynb | 9 +-- 9 files changed, 151 insertions(+), 11 deletions(-) create mode 100644 apis/file.py create mode 100644 dev/image.png create mode 100644 dev/sas_token.md diff --git a/.flake8 b/.flake8 index 1f518c23..2f20d8ee 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,3 @@ [flake8] max-line-length = 120 -ignore= W293 \ No newline at end of file +ignore= W293,W503 \ No newline at end of file diff --git a/.markdownlint.json b/.markdownlint.json index 1d6b0e1f..bd7eaeff 100644 --- a/.markdownlint.json +++ b/.markdownlint.json @@ -4,6 +4,7 @@ "MD024": { "siblings_only": true }, + "MD033": false, "MD036": false, "MD046": false } diff --git a/apis/__init__.py b/apis/__init__.py index 26c8f39e..bfb5d0c0 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -22,6 +22,7 @@ from .dataset_metadata.dataset_rights import api as rights from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_title import api as title +from .file import api as file_api from .participant import api as participants_api from .study import api as study_api from .study_metadata.study_arm import api as arm @@ -98,7 +99,9 @@ @api.route("/echo", endpoint="echo") -class HelloWorld(Resource): +class HelloEverynyan(Resource): + """Test if the server is active""" + @api.response(200, "Success") 
@api.response(400, "Validation Error") def get(self): @@ -108,6 +111,7 @@ def get(self): api.add_namespace(study_api) +api.add_namespace(file_api) api.add_namespace(dataset_api) api.add_namespace(participants_api) api.add_namespace(contributors_api) diff --git a/apis/file.py b/apis/file.py new file mode 100644 index 00000000..68da6aab --- /dev/null +++ b/apis/file.py @@ -0,0 +1,111 @@ +"""APIs for study files""" +import datetime +import importlib +import os + +from urllib.parse import quote + +import requests +from dateutil import tz +from flask_restx import Namespace, Resource, reqparse + +api = Namespace("File", description="File operations", path="/") + + +@api.route("/study//files") +class Files(Resource): + """Files for a study""" + + parser = reqparse.RequestParser() + parser.add_argument("path", type=str, required=False, location="args") + + @api.doc(description="Return a list of all files for a study") + @api.param("path", "The folder path on the file system") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def get(self, study_id): + """Return a list of all files for a study""" + + # todo: anticipating that each study will have a folder in the storage account + # with the same name as the study id. 
+ + # Determine the appropriate configuration module + # based on the testing context + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + + config_module = importlib.import_module(config_module_name) + + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module + + storage_account_name = config.FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME + storage_account_sas_token = config.FAIRHUB_AZURE_READ_SAS_TOKEN + request_time = datetime.datetime.now(datetime.timezone.utc).strftime( + "%a, %d %b %Y %H:%M:%S GMT" + ) + + container = "pooled-data-pilot" # todo: this should be the study id + + query_params = ( + f"recursive=false&resource=filesystem&{storage_account_sas_token}" + ) + + request_args = self.parser.parse_args() + + # subdirectory traversal + if prefix_path := request_args["path"]: + print(prefix_path) + query_path = quote(prefix_path.encode("utf-8")) + query_params = f"directory={query_path}&{query_params}" + + url = f"https://{storage_account_name}.dfs.core.windows.net/{container}?{query_params}" # noqa: E501 # pylint: disable=line-too-long + + print(url) + + api_version = "2023-08-03" + headers = { + "x-ms-date": request_time, + "x-ms-version": api_version, + } + + try: + response = requests.get( + url, + headers=headers, + timeout=30, + ) + + response_json = response.json() + + paths = [] + + for file in response_json["paths"]: + data = { + "contentLength": file["contentLength"], + "creationTime": file["creationTime"], + "name": file["name"], + "isDirectory": bool("isDirectory" in file and file["isDirectory"]), + } + + # convert lastModified to unix timestamp + if "lastModified" in file: + date_string = file["lastModified"] + date_object = datetime.datetime.strptime( + date_string, "%a, %d %b %Y %H:%M:%S %Z" + ) + 
utc_timestamp = date_object.replace(tzinfo=tz.tzutc()).timestamp() + data["lastModified"] = utc_timestamp + + paths.append(data) + + return paths + except requests.exceptions.RequestException as e: + print(f"An error occurred: {e}") + return "Something went wrong with the request", 500 diff --git a/apis/study.py b/apis/study.py index f05e697e..34186e0f 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,3 +1,4 @@ +"""APIs for study operations""" "" from typing import Any, Union from flask import g, request @@ -22,6 +23,8 @@ @api.route("/study") class Studies(Resource): + """All studies""" + parser_study = reqparse.RequestParser(bundle_errors=True) parser_study.add_argument( "title", type=str, required=True, location="json", help="The title of the Study" @@ -39,17 +42,15 @@ class Studies(Resource): @api.response(400, "Validation Error") # @api.marshal_with(study_model) def get(self): - """this code ensure each user access and see only allowed studies""" - # studies = Study.query.filter( - # Study.study_contributors.any(User.id == g.user.id) - # ).all() - # studies = Study.query.filter(User.id == g.user.id).all() + """Return a list of all studies""" study_contributors = model.StudyContributor.query.filter( model.StudyContributor.user_id == g.user.id ).all() # Filter contributors where user_id matches the user's id + study_ids = [contributor.study_id for contributor in study_contributors] studies = model.Study.query.filter(model.Study.id.in_(study_ids)).all() + return [s.to_dict() for s in studies] @api.expect(study_model) @@ -57,6 +58,7 @@ def get(self): @api.response(400, "Validation Error") def post(self): """Create a new study""" + # Schema validation schema = { "type": "object", @@ -77,22 +79,30 @@ def post(self): add_study = model.Study.from_data(data) model.db.session.add(add_study) + study_id = add_study.id study_ = model.Study.query.get(study_id) + study_contributor = model.StudyContributor.from_data(study_, g.user, "owner") 
model.db.session.add(study_contributor) + model.db.session.commit() + return study_.to_dict() @api.route("/study/") class StudyResource(Resource): + """Return a study's details""" + @api.doc(description="Get a study's details") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study) def get(self, study_id: int): + """Return a study's details""" study1 = model.Study.query.get(study_id) + return study1.to_dict() @api.expect(study_model) @@ -118,19 +128,25 @@ def put(self, study_id: int): return e.message, 400 update_study = model.Study.query.get(study_id) + if not is_granted("update_study", update_study): return "Access denied, you can not modify", 403 + update_study.update(request.json) model.db.session.commit() + return update_study.to_dict() @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc(description="Delete a study") def delete(self, study_id: int): + """Delete a study""" study = model.Study.query.get(study_id) + if not is_granted("delete_study", study): return "Access denied, you can not delete study", 403 + # for d in study.dataset: # for version in d.dataset_versions: # version.participants.clear() @@ -140,6 +156,7 @@ def delete(self, study_id: int): # model.db.session.delete(d) # for p in study.participants: # model.db.session.delete(p) + model.db.session.delete(study) model.db.session.commit() diff --git a/config.py b/config.py index f1d0304b..4193b7da 100644 --- a/config.py +++ b/config.py @@ -3,3 +3,6 @@ FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") + +FAIRHUB_AZURE_READ_SAS_TOKEN = environ.get("FAIRHUB_AZURE_READ_SAS_TOKEN") +FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = environ.get("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") diff --git a/dev/image.png b/dev/image.png new file mode 100644 index 0000000000000000000000000000000000000000..90ae014ab5c8ece706ac04506a763e2e93c749f3 GIT binary patch literal 52415 
zcmd43XIN9)yD#dpu@r%&2nYyRQ9wkgQUWR>N>ypnQ6!Ymq>}(q5fBs*kuD%0H6)=W zln_8^ixOH$0)zySP6#cK0HK`V+W&p`zNhT_;oRq(4?LL(bB>HT<{0CB-(N}eBO_g& z<08io95}%9@WH(&2M!#<9yoBY{m-MEPww(xZsffD;rm4Q?g2#4**VUe!_IdM?;JQ# z9?!k~>vg^nT1sR^? zAeXm>6Bq7$xW~`E+$PF(|8I1kCm!x`hx*(g{MDIgW+)Zyb;JG5+r2-3y;&i91d1NB zd3~0@8vbZm>@CDJivH(|`f3`1o)8r)mU8ckEr6L!6H_(3y^ zp>fD}Zw#`S*}pyg1ybNJ_!p<22Mz?!UK!o{eKaX{S8o5{oYzV4ANvQv@B6s-U;nm# zYq$S;n|fjG;Qqm?V2;rK>&NOpIm2?`z^i(l+5NjdFe&?gq>1DAc~~Kxzv!qKUN1ru zy5jSUg}sdjNa@)j9dg%6hDnl%L?@dZNE8;I7PMZXzY@dn~RL3XK-LGHplG3R!F=S{y$_=Ezc5RpN zG|Ln?^FgHS?9~WqBq+~WPF~^<;zx8 zMJ=%jJy~LF;;+wbj0ErB-A{0$?<{@EjSv}QZB|yQC(5f-yz~5edH36Tby3B=u5ghv z8X$3O$-;;EXwAw}OJhTQ-3wRk&LroGe3G!9m|y%Tt#$J8>*_co%1(uK`$^Z(Ozw8c zTe|Af0t{lPIfn2pGPU~6O?UhP*IFsno1tu>JGx;7$Cpp;`d?$QdihP7GhlvgJ&h|C zrk2za{!q^+W$}R(_8f2GH)#OK+Kknb&HdPzd*MMG3~ zV!0W?m!jrAyI4Lx^!D%YD6>kZSWB4P6Rd9RW0jARK8%pL(co5-3UDJY{j&&(E7McQ z)XoesUj1f-S|lQI)%TA<1M4Yc#)mJah1K!94sV3_23+z)mbl-5(#*-PD5Tt@{9P)t zjQe^KGG@YJ#J+cw>f94-<(pbR3nD$qX-T1P^Lr0*p4b7>`!9FBwh1Yx$)@?%wu{%S z$BN#>c{ZZ`56Z-N`t*Luh!H^jA$R(Q2Yzn6B+1IQ{N~NkI`7w>e^&T(joibJ4SYhm ziLE#1nj(Hx5u8!(QFSo80X?#T+TaJPd2?-@vPGSk#RYTsTfPc&e~Sg`i(SDwvEv6} zS5MdQQ^lWjFF-C0vC5uebE7AoA|jtwM^cq1a1E^`YLo^*p}uP5n71*GKdrldzpo$V zeE!6~^v|9py4(sfAB2$)*JwuV~c3c~bjZc3MRJ6`d&4!tvon$UzUQ&Fxe{aK0 z>&`#=$YcIbWejQzybz#fm#TcJe6C+NkCTe#k|a`JX-~i z{xQIpbW;G7rQ$y1uVM1dHt;e;`#J?WlJNqh8-LwESr|bgma^8i8gDKn+slaoeySK= zClg_9zMw?f^hJr?n=2JopsH8&Ut%|YFg!m9S6@Y`I9qptmR4*VE})ua_UA&4_~{QK zi>kn!AdSTxUgVBtuC<`7-eEg+DsyO*%#WYus{VHP+3z#${sJ>z@zkJ|Ns%_>XGvHR zY1b>fY$x+~u&S~9R}_<}9-I3eog4)JzsIkswL~50)(B*4M=MxYYYPMQbCjIfSz;gC zAvR(jH-?b8D5=9UV5vhr>ywXxobEJrCz%*7Nrd>%OU%k*N_mb{ZVJ6Fgq;AYan7)n zBkgAnH$^|8{3o>*NC040m};`rF{F+3Nj58`2(k0T zh|wds!xkzYJPTc}M*tQ9TouvPq&P++`TfU80Whq%7YTebpB4^|eVlzsgB17MunSq}lZM zVZO~CS{#1y8CKr)7ITMvs~|TLjI#fdSsv)t?xjQ6==>^kh;eJ#RcG{bnwwku2q(zM z3|b1~(|$hXf|v~p8xFe6R6a}@t`JjG_?d$5#~q1ToO?Ni*4|{A{mJ=`8@7n2vpd`A z_K$G3TF8Sc8cGb)v$3vM^(C~>bHEC;jEMq{+YQO%b`s>jEgd-n)sk(`cB_}f+2NPI 
zUSnoAl%OU|0L0xL7APptzz{a`HaXVCpHW%j>ZY88)x-8NP=7304Q>gUCd)F^5z-uOeZuJVCyxX2HLp1=7wWyZqraB08p z(d{|in6+hFHbSqg{524vx;xe|I?-g(OQ|Mkc;Df``v zKf|1?P_@>X$Z79hpGe1Ey4_3|9aCu%*tp;}82^AqYm01^dA5b*Gf7sPmS}fCikKm{ zyoIYu&1O3PXf&?@Ra3VOGDqY?{&>-N&q~P3*s0wI&>v`qaNyj<&zLl3pD1s=2_7DK z+=6WS7`e%ZAbz=7d=-J_ge2cfgQb<;vGd9j#OOyjY778_0wb)0hJgS<53`wK8TR=w z@IgZZp1{aOUmk*8+*C?yq-=+eG_2@x?=pF}N#I5AdeU(gBc_v5xsvJ#E%EOzZ;zDQ zI5hFDT7@=eXl<5RqghgwguR9Xi& zo##+BJ8D%XZZ_Fx43bk0I;T2RjK!>l0q(j3^DDxdu}Uz3iH;UVTQqh#P*M6FG$ zN^M~rj~-JS3_K|y=WQ&94{Fx}yq2VAQ9blWHx{f&QFm9jnneJCMk}rlWcV{_GAD!J z+|885;g*4T+nJsjb4PN8^6<5~7jP@xC?!Q6fUvVnrOfcbbEAV=b-vpbUAqx?XSuIb zXW2B|=8We7V1-l!bysCC!W+t2jfu&FnQuFs~xreCx}jUpScDZI`_e)A=2dF z3MuBE?xXJ$ugrz&qEpUWr`dKiiHcFvZ^aE|t$;ehNgYF7j*hDVuYZ<%bW~9S_je2% zZjTRcBN_mBZY}6x7oPJMOjpPwV9GEh_c2DSF7l^;oFV= zV5$-i2ne(fESNgAfX&=IiO(NA(FcU+?IT3TLHFLfAnh6`N zQeAs0;8F;EzO_`{<~;2m>{tKy__!|gcbfiN*({dqon)) zvljxK1Y`4aD;XppMlE^m;Uv|IY^WJ=3)G7j@0-7?a-1~!UQPp986FYE8`&*1uf`R9jksbOLz*+`xYJ{aS4u@FBfc>e2R?NY-{rMiR%EuzjK z2urg;7vDGZ`+3NB0@j%RvvY=K;G*YwS&LiAK5z2U0d>H=x_s8nvz|b zSB1;f2gS=2u_~MW>7LZj{@;j-kgg*%&&Ho^z@KnOyIm^$xxu2LOP)KYXDKXotG#eEvn#e@A~k$ zXuDh_QMhc%;xRIn7nE7Ou%}3qen@FhpAN%lVvwO|;E;!$DKV?m$vuUqtx$X^2n+#N z6@9z+UNEWV=D=`eIQ8ORhsTgCX5G#G`@Dhm;l=LQ!bcvxgdI;db8XI%L^&US?wH!- z=Nvu_4SsYe+!>c=k(Buu@GLxSzrKu0YYu$&G{)^gn0qV?qg}Fn@7@urnwmeW%TrAE z+q(btvb3K<^di%4YnCP5JW=F1^?1dtQX_?w%s{rhRuh6#TQ+cN%jPmmui$oyn(9wv zpac0-rNr*WY|jNTf3oOoiK%nzrF5ZcKfU-7r@*3a?)ol*C4q*98YCS#0#pZvP<93s z4n=#^hmab|$xqSE24s&;_q=K?9L(!+uvu`Y(5V#*`+?a-L!h;JwFs{shSDHm`xNQ& zOFydHDa3Vfal&fuU1N8X!y-#>Z{>Y2EwRg@=DhtS`F6C}FAL?FzP%c5J{b~D8M3(TY5m>o9v26V^mTFpB zG~7n^jOle>DmEvaEq_grvdOE%eb=0EU=UxpoK>AWvP>a-r>3}~LaC%&TTaF_n}m8? 
z^CXZHU_Wc~fF|QG0)RlfmvZZEgwW@M>|ae5h&J6sCo?qlFp!n6jg}81ciYxKvP&}{ z@~GUa2X;S0E6A|_)X)CCZVF6{a!AB8ZOUeI<|)ygM3s`SdE}OoX!zB-4uG%w*Q)Q= zPRsufOW7AD6Z@4c&s~Sw7`(ZbD#fknPa@_)eVn(0lrZx5?OSDjg@| z(|*I!i%$v|chhl_R%{WRTfE|<)`WX;Xv&p(g4Gr{R_}K5`X|H6?NA-a~CooDPygCBw^Kk^)BVMI1 zfM!=$+0)Dmfu?$5R|tE_romCdjt9%NI8P)-zVTss`E%p=o*H0nG0q*|{PPG;EV*7a zb4>A8tsP@%8jW#}>KU5Gaf;-C7w!fd^*2W0xfz8|t>+^4PAH4%tJ^iIGF#jY!3lO- zm4W_q1fy8Tm+WIO4fOA`skyF z?tU0?n3v?mB5FMXy`eBj3$xS|MA`_f9C;)Ps4_RM7aaVxY510o{Vr}zKJ#ydulu;3 zStqZMYZ7-by2JBMXlNwl-A?*1V)%FuGJSA9%ed44#>qPC9ywbHUrtxcpu^?K))IA5 zfs-}q*2NjWG$23^)k zxdI9=g&P|ZZ*LjKzOSMo9K@!}_o`H+o`g0nee8%ul8DC^=M0@P7L~KCGi<#2)XVAq zsxEdm{lyqNDnh=pD`6hU&y2!SKkc|`moU`wS9MCN!LJF(lS_}LQa}c?-R9wz1@!QC z(<;1|ntR4+zR-=M&~2I69C?NKkr{c?pBPBY4mBe@E88&bx(fl%ZE-n0kKIXlIyg?O zwc+88FIKI|zNIf5KjM9M-gYElE9oilAbMtcBfn-J?^J2~VEuU{qY^%aVr^WvPZ$~h z9rq1V-aPg!=vLoh>-*(i*q(Z`Gn^wS5I99;ull`fpP()+k7Qr2FiZ_^0=1LHGa!)X zBilcdKcCyrKM&xe`!c*1mgt(-07Oo4e@Y2Ul3cS={y8#YBkD7;qE9sz9NzQz3*CtS z4I*2d%2DxZA(&U+<)J(VF!d^FZ>7EI|G>pI`u_r$JLdxb!JboN|H}+OrjVME?d3&E zG+rb@masEz20dI_8+Sq%eaU|I{wPu3oeeyRieK96P~Ykw+sPAEP+fwK8P8VlQewBJ z$eOcl=PW)0_E+eayGt=s*@F^c#SbOeL+zuf9l<;6q;*ki+G*OH*_}HZEJwyzWAG%; z&T_j6x9uOI4Qk>$msQ@{m;tuiJ|FUWRk6QG0js~HtB^&g@nLIuo-V5k6(;b@S@i2F zewO(1INC|4-w9kDrci}yS2!>-5rcVVpk>|mUTWgR77AHq?jcmEh~Oj>b*B_UTX^=< z${KeCxizrFwotKR(1I5H7}?(V?zcM2LtMnGzakk4LTcJhPf40QD|mG|h@pE%s_cY9 zuw7dk2jdm33%Fv~_5jyJLO~+8D`NE2&kpX%y?9Kh$oDANUOJ_NuX;rA50kdNb%F8`Fwb_GST1pxn^j1P>uC=JNCr&&W_fjWN@N%5W|LODXZ+Gc{f`V+$SFGQSr;r^X}7a~Y(`Bez7xpFDHhFVU@t9{$K{nt z#2>a+o8QWC{# zx3g9NYEN4=9+k0a9{DE;ICI!S^kZJLh~D5=4^!LRWZdCc!<#(ViKA?_wY36iN_z59 zHikFj)yL8!v8KqIy@r25O3XyjrAC9v$!3E^&Kn-kpU2a|KY8w%h2%lhB2^LL#qFS_ z^Kf#=( z^aWQ38ins1%f`#dTxEA~QEw;2RF1w^M_#qrzv=v%hR6NdR}4NF3V3-vp-sa(*pL)6 zSMA=9^{_rnRyIMq`p6XeJ`DLIvIUNv}G6Vg)(u-IqXqIM2_#2bVCPEfvat{yP8$qZrT0bOp_3|hl;Ij7|m}# zvLV@`dKEY~Ik{!4oUhsaLldai-ABm=WCI?M<8VRO9me!*PhzKQJGW43(3wwRMwrTM zd{+{Dq1j&9z;=T>)l?`FW9H|3$(N^IDt>cu?u_hWm&8)JIXtgfP3t*gwwgK^lY33* 
zI_GJudCQE3QP8Idrk)z>pw3$9YT7TLxNwEkRI#dVaks2fupyTc}0WrnyuF(WwM8| zlG4}oN)2Wdi@tvWrgiSFf=IHBPOm+FXqYz|RlV5r(YZLYJ{|KM=Nta4A24oIzA*8M z*uCC0?N+71>EVIZ@7>3-p%4@GYa*(}DbE*cg`O{w_>og0hVFm45~7hO9-7Gr2q)$l zJSdg6plx~2n6=}4Ma8+RXbI@gab*QvbK??4>r;&b5*kk@Bar8oM}VjlOzi{zOpE*P z@oLohF|YWS3o9dWpE#*U9G0ZYrF>2I{hOlcF=GwaM9spKD_7ZLDo$FYq@5`)yKIkw z(#$S=Snl5e*HPxp5a!#k$g2gB$TbXS!7?9&-^T?%^lI_QcnzBA`Or8lT07yX!v{A^ z&QOi_*fu!Hxu1{yocrlghHW_XeZgx;tND|Ub+L~cfN zz^~xLQSe|;m~H3MT2|5W(uAQFbs*+EfhXOy_Jv*Go>w2lcNpy`Eq}gutjCB)_FbLu zm5X>)9wq0-gYLt72r2g7&(1KlgUEJvtZk#gsrKgwk#UE`pht4uzCeE@k$se|-6|=V zSM{`dXFHZ~LCN$mO+Nl-*9iqUhhymQQ9$wsD|MWNQ!5gv8T*mpk0ImVBo7YKw5pyd1U7EXYh0nQ#{tDV}-u2$>5kz?qB$H$?%;uSD>AlVQ^gt@F zY6O)pY&jgviQkmI$zdnD8bX&7^{1f78)CE?aa?jmHR~fAs}L2YEzG%~sp{{!Eb0uI zygr%8UZ+BqYHubw9$K2wtTtqsSzvCG;|I6g*e*?7-vbxJ5%gyuI>~@XDtzT|1xEcAK z=$B*n<3OO|;llTMp7#*@naw@6UB-fsMcY28f{lkh<_ld6JjJKJA5S^R>ca>s=P3%N zED+|>5}JWgKa|`=Zha;Aa*w{V;jE3Oujn!5;i&=(4RO{F1Zsj|-^Xvfo5dPh+03Ng z5)GsO-YjnL8(vsr(jM)EcnpO+%`AL?CB08BP*|lg@ku1o|`d6$66tZ zYMn~^lf_}WyS9?yR*yxfPHVWZns-+1fprv#$J(>My;Xy zE?U@?%Rox6Yj!4()n5whS1fFtC$kanqK9-DJGbv38T0@(eAdsBN&BR zoGfW+P|UIh{V0{F^f+T%=osx9z2gTC;Dz}#8K{@tKk3!|%-=0u1?nC@_^ABBI&g!> ztL8SJ)V|dk>W_n!alb(k-Zx)) zC;;WN8D&AsWp3!h=Lbu%;CPWd&vSXPYFo9J7i))8WJLvNw_J&XNNx4}w)Hz_KNN+S z)kcoRTyfL4SQ!3R@%dWaV}HVK^2I>Y^rl5VvgL)qQFzgCdo{NByo!K~2=|-NbGg4F z^oEH~J@>>7#bL=F=6TT+O0*GX+mR7a?w!9F*QU4@CssB!3eUNzu23%O`7Q{T+T6p- z3yv>a3*LY14h4Tpt+m;QJ}fsht0P1-?4$Lwv%Ca(V0G@SB&)mp`_gD|W$CeGv#_!% z!@?+-xto7$TZ=dC$C0}8f2PTTY%8C!pTda!&v=5LA#GY+VcDlKa*M@-bOP}xE-Fvm zSXx=b$ak*zs!blyT04W-UE^+zlvYoMw5JYSQ`Jd`;ij0x^K=dWC{05*0hD?zByLH< zS@EE3R{mX2YN$$z!b0W0ppP`kC=N5piuR0+40qPS);X@Leb#<8FKndmv=6czgu_Lv zl)la1D&DRB)~ViohX3x)_%nj(=S@^{TMPi8#;-_Q7w4)P;eIujv-Bpi&4cPG*aXVLAqT1tL-0v!}dZjE^MFLj~+{ph0&$hMDf} zrCI84!C*yHD*4-eEH193G8r6mcvip-N9< zsezDOuma+tdkr>JSbK#Pv?z)GKX^r0>U-Hz`QB@yFY;4k3^jZg)Ux)ujW_w!#sMIUMD3Aqek$JDb$XRl$;oEGxP}cAcYa55x!H5kaq-l`_w;hLr;ViA&vety 
zfENPpz6r#+qV$rP55>=}lLM5ep6c@n$|TR4f;1#Yxw=wUmrC`-8TZ_KasO*>KlRx0 z`^Xg0oAlnTQPpoF3X(1Z$5otHf=1b5cI?V~=wrTr1<`}Zj=o;T1{o5gpH98~y-ydM zTYG-@e+n!p2>eTohRFVh8>v~GTvPuoOgqqh|6j<$|CIVqGFig?mrOqli?ub@thI;k zP$8b(liQZk0Q6+Deb?8q1$#S< zd(|B9VQ)Ko&sWwuAVcul_B+vVn!pmMsr#eJ?2bx&eU8L#kQ?;&Atv7(6f&7&dZP<^ zk(HQTuZ_cTt@X!;4aTN@A=yK1{ryo}TO@mb$JORl4q}ngF!(J#bit9+r6Hf;I+PIr zfSnep)eL(OTxVw?7Qg#RG(8PLV#Uva4cC}ql-Ld^x;s5RJs65TVaC^ZbE--7@03PT z%7m7LdWrGq%P7qaUlZ=VrJZ%iR;7hK0SULAY5NIoow?`${JS`qF}3>#ij%}CyxuAB zERp#88fL+?k)F|UHKhY%XJ?RbiY_Bm@|n#DmXZUo z=K9iHY&EHbBV~wOwcgu3m9)4Ns9pAp?r-JZ`QnEp@22Nu&Agr)LHSoCQhy(HAZ=%) zVa?}9#2g~$$QgL&@s~)CZm`0&wKjq|=0X1D99_C^ zogct1begZ7_=FL4acYPjRiZP(K|lauX>?kb%>yU$QKd3?w?vuT*2n}f+Op?t`>z3* zcjlx?u+#C)e+N#yB?Bx?j#ko@T=u+W8+gam>*f6JBHz32`N-L8_5nknN(-P}J?O(! z4{~MP9_#A1W(YPUO#|CKwp?e0vaN3uYWWa? zIVNB~DzDUmaf6OspX9duE2e3Y@9R&+i=muG1n3 zv;hygu-tA>SM&mka$tW=OZE^chv-u7DQ~<3#I*a5!eqy2*p5v@rC&fRxE@I&`T2Ux z$m%i8)VE!2vI?R)B^d;4;cY(Xl(pvvSw7$HMerFUQxdPF#NS05Gsc6Yze8}BK=cKb z!aKo7NZrp(j^-7d=)~PDtS>-(?FwCb*SPiRK1|uwF^Quw=pY%N3;E)x`Lz&h5fYdj zLJ{;rk`PCF=eu7UgN%xPcB1JYJ!Zyjd#ADAy&m#^VT93TWM4WZY(*plTECUK$o4?9 z!W=LS)^x5|j@9v6Zm#S$Sb4Rnj)V2`zjENQw zL$VN4gX!DlC^w-&y9|BRLOZhM3}aXP2xFLa6*KCi)bfqE4zwvUmkj}YYnT~w>b{A> z(0a%-RTF>OT2QN6P|wk>V-po&h%fm|9$m_`^4YRTicr%yi#e%kD5GL>KV@NT?=!6r z{iV}KOk|wZ;oYUa2VNx2wFKbpSeBi)>c2n{;Zxh8Pf5=;v*1UHz&>??0%tw7^VGo4cUW zYhkQg{fqjtg+MuPh8Qc=^DiM|UDpD|%&Xk;FMQ{s2=afBC~vRR*1szec9FnU@AKHN z`gy@wv|Od5;jro3&S_2}?8#Iny&HX5og1aLts>$Jzo#yZbu?@=fAXTXpd#5~!>TT3 z%IVOO4I_Pv-W6u(1RQH2&80$Zru%KgRyu!vkP1I&a+FdK!l_&cXVNi0v5xcYu69?h z+&9~demBiQq~2_4Z<)L3%(d5+fZ$e?DuX2fE*cKf`T+_Ua$GcOFa|n~iHf7zJveH% zpJnD?V3QURp@-!xO420S{>oaZOrD)miQPIARp}m;=2e}G2tWp}pRL4pBco3stA>l6 z;6|oHK4nf-`x(OZ{u6|-tB|J8cu_mme0%-rFDIg?P~~d3oy6(zB&lWvXyf5C0;$Kw7B(RY7k4N90i0}q+HR8tcTL$)Uu@jXy{t|FsjPtw zZn3VyP?b%m+Mt#@pa3dy&F=61g!jbo!9)uk=w_`e@oIb8vt% zXzip$k0pq|-HzPBO1rTadTB+t2NExh4}jD^E^fOHb%~tUGp4CFUe@=T;fltrXFXiQ?lbrf1)NOyk=38$C5HwRL|q;A!33H 
z@_)#Y{ayBehoKfb!6sHN>8$>qq%h8+O631sZr#&}4xhSU724ELm>%!U$(?+5g)qgE zx2hdAB@lR`0c!=gc~*EgBz4ZZoRdHA{ z&X#yp&A~cfd^U48bss`j$C@rZpk~x?1lHY9>JvH+491(AaX~kZlU!I?K6{%6&hdGL zc&}VlpMmXXFw@{?_gc9zgw2vrsa7RFgiTW5y?V!WcWMbibvGftTBVrKoqbi*3!<$@ zg!Rfta4U{Mo+Fa}Q6&dM)*_He-&T;te(3rqhK?WmTMzI6Xi)kyl(hdCpTw8<6 z;iZIvFCyJsQ?cJYPTE=Wfc}Q@+>lTX4?E!gKCxtBDaDa$PXRp-s4D3AKJB>3E!S+r zhst=4SbtTUJg>Vxbf`m46@cQVofIR&*hcKMS z(PTDRS4-Ns-2Xz2Lg)(QM2Sa+!9)+uQhug(HH+m@9gy+ySFe2mLAmUMl=B|A3e{0p z{Ivh+77DnldHsp9gnlB7zZkLV|B+(@bA?nFV<0D25y>B^uyJA7kvAxtf+IYg`n7bGGNJq?p zCBfQ0NXXX9QL(d8tm@pGo8m5GEccHu-}LHAWy?5^&s;QEMOy`wQVZ{89m|j4g4k_yV+Uq4f3+QHd&ITbJ0!1UV8a)QmG=x$vl4&^f z*|e-Cnu^Oqw%0SS}C%7`o@K8-iAD70oUcmLn7M zI{6?qr%le@t9TKXlOEn1H!q`v7( zz}fLOb!NKg%w_j6^7hn%qhP4!aqJbyj)o!|HEC(Dk3Tnh;#M8w1yk@z1b4Uai4N2U zd`iaOhi8;`7~`|Gz6dj?5*saD@W7s+G;~j}$DE_$+wt0!W1RQ5>y*H|?h%Ksl9{E- zXWaq}AD0%1*uOO7dei;~TlND_y`i*`VytR`h;KO;39_YfODNJ=mjUbwJLIHQMdo8D z2`pwPvU#59M{Uu$B5-DsBF32>;-6-hn;_i-(+>Cd5{|}>G)?g22fUjG)PpYDqCwJ? z0uV6bL9pRkdYuRqh?P{IPOV=SZ9uw;*bcMe!ZMc^8I}p#&5nDiD8&fyeM9y|#b1HE zLaCWc0=p`-;&Dx`@gAemW@UWE>*|o2>o>0}-|0#hgFQ zHT;wbzFMbR6l-9u86rE}1Sz{UyhcM#6Yp zsifMHV`J*EPJH5e^NU#0X^s4lH&*#h>f6sqpZmN7T@QU8yHr`BP;#YWyW)>F$M9c1 zJRZk7N4Dyx(`{5+iN&DxZm(fIhZ<&1za=a@AToE*@$B%^RPDp+0Lonq&k zV>V2JrjCUu*N#Px36B;ZxtC!Uj^7|m*4KBCPP2?IEpI|W50D14{dYa02=NJwdq0DIS4l9%K?xjq= zVm-TP8jk0bf*0Q6st6$Cdl0eAu!a{-sN=X3k~aP_EilXA17&XB&i?a< zi9^QwA-3Nqd+BaFv~MI=51^op?Ou7Eye63fNo%VukT07u;{XwSidX~{d4rPEdXm(!T;ioK8*w2AG z4U_A$JF)>#QhmXzUBH_#sdX3*W8atT3llG6X+tI|y$v%>8Y?;z3wrRa#LumtaIdgH zb&cr(dAhYQ#b$~ht;KLaz=NRhqnAD*?OlI6=#YSn134Nj2b|&T!T8gOA zP3A4T<8jRkTC6KsUgPUdoh(j%S`aVM_*1jq?mOQMbw`9pT19S9v?OCaDT2o;&E?Zf`u8B# z9PgD3OaF(W-TpJ!ope`23$j-C3V9-#>gp3`180+@cSe$pb+m5)f-N8fiVf! 
zs;4jJRFucR1ea$#=8ecmxb}pxol5;P)WjJi8-M+T3aoSVMQ6azgq8FP`Og-es239W zr*;-=N=fE+bGY3fhKYJ4ecf;`g>aI6P1KJrKc_f6pEFbcYgN4M?vC2}E>~5B2i8p^ z#ofq1_ky1UFp?D`i)khtRIp4#^Q{%FY}b8egydc@xgwFAHbzU8@{`4gt9Wo{(wZmRVvGuekT2`?3+??v_Wk_hYs>F2Vom^;ARFtXWbQYDP7H-Vw&t&*t>VKM7XAz;Tpy-}FE%|q~`pRkh+ni_ye zhO?$udOg~Dql7ibhFGPg*%v7@%`MJ(AE4X{VN)MKMGk}By_M2?KdO^FuCv);%y_r! zb&VT01-e%oHJKdUgl>wX8u2FnnliMEL!LSGEDahj`;|H>cQEVHyFr~XfZdxccVIeV z;)}3*%h8y0flK$?^nzBfv;CImNCZ zbp(%6#^_%_HFi3ZB;H7xac$X$cWM@>8CMv(r<+<3DWua&Voii*8qXQs0g87BfsZAoWoHh!3K7AJY7OKN-$GW*uy-}4XYJqEwkpAXHZE22%w;|?~28(*rYEU z6H!ILIsQ^i5WKf^?H6}1XlcTJP&=EQ+0q=huLSW9l|G|1%DBcM1g&!-7bo5ODF8Ps zfbNm$TAr}}ak$8k`LyxbC>|_M=cbYwyXzqSvUogfsyN)r^hqvc5!)NP-VVi9>tX>n z=^NWdIu}l=?7O#`*3Y%4b3Bsv!h=meEgAp#lX_oS0nRxbS^}^rDW`&DMSbWsQ!=?D z>a-#_n-c;X5mZxNf^diUW105IeQAdDpPQgjoH+-=jlvC@gUS z56t|oS?JZvbA-J;VkdiD3?p7CN!RDJK&n;&RW6s44de=!>+a&~fo%{1aXLFZ6d7CR zFJ|1cQE?!!L4Pw^FZEm7e@;xwoscSpUuL zVnD|C&Es+^S=20XWMtrfVg*4n`$OctlNvHB#!*Y$7e&2vjXc^Z`8P*EG*@JC(Ouj& z3A~92l^RDmJ6GP95MC~nkb0u6BY#iwG{V%0qZj^iT<`;o-BR@1r_19W7*JBt<-uVE zuZP%s=ohXPWumq9jp%0oNTsJK=u6s_Zj}u&U;(k0j1$a~{UmJble!pnCXbW3*d~`r zr@3CSnB&1<5fteMgYU?azgYUw9;S0GTg|%{EMLf7sYHyRI{ee*7pw56F#F0WxB_s5 zN#4$jrJo&;_QG(4Srx>?opGPXuNd4iaz3X3q^OY zh(8MuClCHs0IXkZ?-S(|2?%2E;g)}lQqS7omhT&-UPPS=WCaOJlE*5<&#^15nJYB${XLjp*QXUlqq} zOBWs>d)`P4f+Sk0NZ^@Ho^OYl9KrUQAF2A3Qr39rb=c6=O!e#&!&1ugvokhj%HMYu zzuvF^Z9YXwmB-3^Xgwz?#Fb9HAQBwzdOWwK3LS115jn;T$eSNnOp99_Ug&wqk#UKu zYix^I^qfz~m~p;kWZg9e&v|)#P)naPv9oL9&qjOq1)Da|0&n9bOSqb)G*@`)kHq~F?|a(fpmaj)>jFt5J_cGBz0<3!Jc3$Gmqz)-Rj zs`@;~({%mKR}G3o%#S1Tk4ZD=+5zR*tb53zr3MfJ8NKD``)fsC^>@!5m>E9+STwrw zKA<`632|uI|5=T-jtUvrk;b70J>4HMhtdb$7`5Kt;<)i*%_GuHkVE*yKY#8{+^z^v z$vpPN+1+>Iq9lyqw5ysmnbZyUe@6^b= zK)_M!9L+LgB-8y;Trt&#qG1Yq2K}j^i&u?RZi!m@F5BJz*+3HK`n0?vX$301?F*3Lm|9Gi70IyTRcNOjYlr|cYFQ7f*1B~jt zJNNRpQ!6VJYlP%2?XWsDdt_8Q%B)KM^lhwU7uxYkH4po5^NT|%3lD3na!mrU^#CV( z4PYqF7NfL~zA!TVLpxinkLiRL4CYLEX#UNrSYCMgau(HF6~~v9Xlc>8o386IEwa#+ 
zFhE-e*V3cFp~_Q(mIPSTKvzdrwnYF9*L#@sUaD8tS(|0xoD!VDU-@5ojyK^BpGD6i zkFoK9^^wH#sORiT04F*vy^omtK;6719uH}320@c&IqLXRuSs#$0A&`lrE2rLtkj(| zoI$z0dvVQW9p_K zFeMxRVJK6@|HV|MKUu|MEe|%j| zN9AwkZoi3F#hq*jc!ERI)i>G?msGM@a!+g_qdbPy9IAu@+l`(d;P?y?m!dY?ZqVc` zzj1crW$B73b2qeY-JTz@A~TG%xG&o}&~5Y&O-K7-x?B`$l!CA;NgsaWy>K*N-a|qI zM(-N^NqEl8<2V-M(mnXxt#e@{haC~I-#!XU&{5)A^1eS=O0;NUqNz|Riu}qs%cXp} z_P2NKh6yfx8h1xw3eVQB8iE}`aO6lCE-hq`wo(hkUkx5mqg6h-fphezq&PIxq4hfo zF#AR-#oFB$9-xxX^!j8pznvkyh4x@9Iq+I2#LVg+bCd9rN(UMjYSMMSe&@5#pKj4~ z@aWph4FAFfZ5OJ87Kgb?UzuxnskSGHORIQEB(|r!2zrpxuG*7NjGooaX!HuGZ?w7L z<^2I4RG-%Qd%{rDY*}mQ-)7!SVGY=jc=7o7#Hyo#4W&6Lrgmg}KZx!m=0FU!yLkNQ z#O1BQpQp>d2WQ63@pu9L?PoSHg8b;kg+YoOU!BOTm^4G^;j2QE=TAcZnbc6pl-xruXzCNG;M9~JZ+6`CbNpRfvq^gNfqd)o##Ja`^KuNejJUg zF3q?unu;meb_H_S8wE$z4R+stgpDJ8)@k?I>w4p$m5afyQp2p>ltL_ZV!>*40({3C zk=pPKSpDkpPL=YWdz)6j83T1)9E^L=9AmEbE6#Cw-`YBSw;-(Ev3KT_761OE3U)mj zN%iCi36;FZM{YYZZUQFnnoq{*={RcF{p539M=MR++grBYXLwz0eVc4DE<*UNQJ#BQ zX&)7!7fb6J{d6)3DNW)Slqz>PKgzK$ol!Z@4c%-PuTd(Ws_^J_3l}bHUSR_HchQo~ z$xh>}sAQ z^?UMQ>3or!>}B_gZud>k;UXfU)F1!Q6B6J!ROn z=o6=s!Z^mc$#W9j8xU}YMenh8(zhQ+qs@;%KB=^Mg!-pDW zQ_tK3*Ew`+H%DeDXJ*?gSpda=GcY&K+W{WU%~3a34ey)rr9rLY*w%s8!)4!}(CC&w<#x%U^(#npJ9( za9ovqYy$5y^5ye}#bS{H3=;Ppo#sI#h@zd~^n=Kk-TttETY>v22M|s^GpN?h> zp!q?&zlez9L29RE0#?S!a$gMt3H6RQM!0z`85OJ9qo7jZDd5_9tE%beqoWR3a<1U? 
zy}4*S=-TUFlU?-$MjufD$A1_<&s!qOWw7YF8eE^#EgQRmpQj?VZR9{)=ng+9(de%Z zrijzy&f~)q0G=e)>!j;n>E&5C2rce}H98EGXAvu?yZ1%kGRimFu;^y~r!-sU`9R0g zjE95ITZVjP9P`rllfP^}4nL7RpcnAqLtns$ATcpB{n>dXNQx5soU`fDGCq*1*;CK< z&H-%}rJy?k+P%kzMBK{W3twH$FBdViF`9`faEUX;WY`W&nhG&LJK!1d_0yW{*%^2W z)=ZifQN~ME?nJ~t-~l>K@QBpd)49TCdHWn^pn-W0F-ynL4ztSXNrn!gIedwQZLksa z?O?kJsnX+tmsO2F#&xy9)4lAO6CB7#n~g9@HhH`i^`hFKbXIS_<6~UJl*W5l{opth zoo;EjPzzmXP4!ikunBZDnc1>uf9Xgwj&T_^eBc$h`*=Z?)j}hHreH7B`##xzZ&mpz zqOra854uw+V3g?z{txO}_K)tXP4TSHulbJ>nc+{{bv>^7fM7*{eYN&|ici%4`7%}t zlNta2=1Kjpr|V*N2ZwGn4v}0c>K7olKmB&!;wnJz0Or-Daa~Y(2PLq5O!lB)({shJ z;$q8f4T8plZs%=T@Pase_4TZeulwWGjmzNsrnn15>?1EX`;yC1LBJ~N->4^!G!y5kka;&#NF!q)23K{56o4qJ;-cA*4i~T2{Phop>M1T5h9a_j$=_mgYa2fL z#*0dK-c{e`um06tuk5hCRNVq?zVE|drzB0EPPROB=mC_Ow|)!4Q^-_Kv2}|duDiS; z6`hi$WcuSvVIRPaIVGhEHpR^OJaqK2~8kB%Rfj~RH?7I)UhW(snR|0;5P_C z&270(vAh11R~qG5=T;{=j!cieXdLdBHd6;Ku1QmDnL9+Hc3oFLZ^vIxFjyZ}zwtnLIl)?HnXu4Ws%=kovEq4-Fv! z{jJw1p`+T_3jj_r4t3&u3w*rd`BGa54KWIN!cD)MCHx|nHrLxeKI<|`>#`Tf8|51N z)6SsiXnQ-**xEn)nVe$m-D6+VGvlu?GNE1OvNJKUJp3c7nD)(j$mJ1M_*qkgWUK^0 z5ChqqK&G3FJTTtkJWIbN&~?woy9x~Q!PUyS@u;55DZ4S&sVaxNOo$>16JqeY8E;+7 z1iZ6Al=@@51z%OxubF2(?B**o<38giF!TG^NcwU6*U+b!irbjA5@Az??qdTrY9|DC zOfftDqvLjWtnVdIlEiL=xg)JHJyU1BI!Ip{axX)NimbWoX4+ScltIpSyq2axilx*z zJ7`aKce<_hrhUV6i zgq?bKB+01Yj{7RX6_QUeW2Ipi8)fZr;ndusc*WO>c*P$TI*ZG_?jMrf`Mr({1oG>} zHWFr(087;w$zvDL++}S}^MIV2twtgACEBFEl&%&swut!T7_0B>e*-mDW*+Rhc?`AF zcN_dYsx!hrR>rIY-XYo6T?TxLedk;zdoeD^$joDbzbnK>>Wr;>A`vh*prjq?b!;IE z|9No;KIkts@}BWZA*CQ$I9?zV^ZoYDEu`tq0M*Gp&7G*}Adc0+~ zuWos1-IDf*1DR)SH7aLe9DfIMR&YHPBu&odP*KEOm%9w>_@k6YcRIA)Z|2P`j4n%} znCDK`954P6M=Zn^7U*g}Jo2e9DOiJ6P{ zu4>nYLf(hY3>9AHVN(Xy*2)iK!MYY7x1PsHl^Dqnmj!$bdLes)?B%tiXD*FfX%Mh! 
zk<_4djSzm2;6aJy#uGmm$D&`!sqyfbwBgJZCp{0N-n;(C7IlxPT%&bY7A~K;&cm?& z^*^bBHvmLLQm??jV84!BT+<;=L*{0wQ1vp} zXgh@pW!2ms-qT?p9{0To?^Sk6{5{b1fhGFzR9CC~+l{vK?Em-_f9LP&<`K)5?bHIw zqJw`+ivO+zKsn3vD!Zuj=^IV@TNBKX#h)kZw-`R5KxW8-Vku%$XY6TU=+9c`){?z9 zeMTgBxwXFX)hEu;I*EVjCxq)|aIpxdEHmh~yMm4&I|b zx_-Xi8*yXC;)GfOp;!;(ruA|}htmhPVq%S=XNgY5M?V%$vKf!de+5XUK9T zKBAujCls1IB{FZ)r0o*~#240?J;Jdgse#fZoFY;-`NkSf<>N5m95wwG#zk~omt_Xs zzcS#<^J{7`Z|%g#y)r}B;VM(FhTvGX^J8ItPN!6e27S`>xzyCcF+*Ce)t*&z%xGb$ z0KRmc%=brUZTbOZnFSZRD6niK>uNr2Nc5qy-5F8wOyy-5hJCCZQ;2@JZaDNs>5Rbj zLhzKQx?5&PwbFG-fM(^%6lC8mZ`;0P`5*3 zzM$^JL1y@abz?fek!ITdnzCq3xocLbCd3}2^x|Bs%qVR)^YoL|J52dbNhA2UAo%lQ zwx4e0SZjKJj#1SP`yE;_I!Z4}2sox5)_$lW8k{Hpdr)fyb0P4BOCA5SG5$buG+4nU z`R?_;44)9hBF?%sFG%y!?B$`jJKKNe($s`2MXN!Uem&O>9T*h z@mX`jEnh`N;w@o5aTM#YkC~RJPKN@4?yrI1MMnyGWqf8<#z|~#l$@MHb za>5&@AA$lWz*T5e|8zv&@RVet?XO!om^0i-cVV3R6!VLLI!8LGoGAwR^}5XqR*hT!9VR?}-W;fP-gEW;Nt25S?8=Te`9`HXnN zZ}lu>+u0;Cl**hRDX?N{zyU&L0YX;M$h57Pe0jcQHNA4<51pK&+q~!8S7%_e5%=gy z;Jr%nNYn`_>*RU~RAM|rCT(1vXfJVP%W!#!_HvatcsYLYYr8R8-O^+@E1m*7E^Xvs zckOpkIUg&KZhB<>)HO*HiQs@+WLU>i|Cr}$aklr+Wq9o5viO6n`?@NLZ9ke;C_qTd?3&KzKaM{Vfo1!Uw+00$F z?Vh#nZtIsSg0&45%Gh_UQ!*zY@1&+wnzocw0Y%uK+48J1lU z0pwZpk9^I1Xo(HeTg=2uM1$=#6V0!{1k=t$cgr*J4hAqG8n@^uXVmMkKg_L2L1K%(IVbP{vLK%tj`vaOD8{+j+C&&gAhxg-w$0$pDHXuW zN0R$kGiH^=dWek#rjt<(lzR5q<+^3-c%Xh?Q9MN!m=c|lOqtbP>D2eAHd9}&ZTt;0 zdW^?ypvH~~exlCJ{`6n3{G2>Cs+3N^ZGf?_-EDrPt*%Y*N*l{bz60OL*!o52th{U(u(3 zbK`zZmc;{mdgQaqW;Y$6dTD|p-3Uq+Li^ce}C1 z#vA;WUd!8LCkym`aiD{{M=4_-33*VfMUFjPnEhDN|;;`?;FR zC8-arC)Fe^D+V+x}5j${|cq=7H3<<9&T@q zX7vG$GL5d<{@r|^Kmd%M%4(&>C>ycoJOE+mqT3%vPXL}Bhx#KUlm}W1{KW-<54Qho z5_XcN@Nh_&nEOPnA;4Oa_U^d1eMbLZz12YdeLF?-z%6!lefk(e^zPtgq5)CZZ>qJ5 zYMkaF-@UYD} z4eF?~mhKI>?OD1u{R={PdrsVNmOf(h0C4S7y*s-QTKBl0sH~=lgiX@cNvLlo=|}T` z^KIo>N#j-lKH`)7!)q>{yUK~%w_Qd~03?dyfYEbiIXU}%>#FVY)vJx^qXTUbj_beF z=mO?#h}CB#xpij9%ky}5Vma$!=#1Jx$&i;9`2%-lo0lyUt9NkO)`VdGfVv2p?;-;v zX>G-!3=%a9m;%RJxCt;S?H-2bQPZ@!y!KKu4W8^~d*IyJA5O+Ky9QWipQ8GMPUFpY 
zNi`wH(W34yc*)cD?B(7EjbiUxpt6&{`XwvSq=*Az_~{{jVErQiuzHKP5(7nD>{?Ql z302=B4DWn84lLdTV6)p4oP9B?$ACgFhboFNgQ{;!WXC8K98I#Nx= zHa@d!rCOY}{u~XUn|m5iJT~7M0XrWs)(FxAa59cp3_nix$^v>2&Dm}Vw_iUuT;PFV zcJ>J_LRYmZx4!yQkK|o$GWkAp0g)nf#k{Z`uzqk%Dq%-_xlKh zDtySeLX*1(BYmZ_F4Fg@sHyTIhx&LYYz$&Ypv4;uUQtNn|BHP`UDBRX!4|$)t+Fji z=NdMxx0U?Bn|Hq(I-Hs0tzY?J$q9gLjos_ox(rJOXlMr~T49sap>NSOF-Hk7qJD!+ zfl9`6qqr+9u9uH#+1%ZJhb06#qN^c)%ihjftzsm>(}6HPRK(rmLOjKL<>C;u<)~-f z60-TT0fr$g44ia|U%2u~2~g!Xp(cNiH#!I++mv46r_-7?8Y(Oc6PK%qpEf3l z(6Xg5dQj(L`BB;fbBh|qOjy|!L$u_Bs1~SJORBvp{Y`u+qcJrx9>mNj!&0skJ~%V- zP&OF`p&i};IJ*g!p$)7;cC_TqO_!0lBiI+(olBl+;S zofR4j=uda)NGo2gLX?r&-2j=EwO5qu+3p#UIegBlUPryqXRi|qm)aE0C{!wsHr&`F zq=rPVT4oGtR`L&BcGkYg7X3aG5|cYXy+&O*TRrfX^~Jq}h|MX&!|>P2RmTofruj*y z-Ygr~1QeX;0d-~6CxuZwbj#~kR_h;xo#=K!(ktBK)-LgU>#=i=aT{LQ?riyjC0j$} zrmN}t=q_J-l;O!VxRwBN^ugj#H`@COb+ubcr-a;_ z870TlUA)TlAQEUrEZiAwr1YA`zB^a6Z2&}?)mmybTDV$G_xt@3`!+nKh+87`L~d23 zkjf+OGfvt@4`QH}gcI;l+1vD?$2{a;%SYUcb<#A-it*G6u96V#Vg%mFQ)n+O+h^Db z13-}^#((7b5BP>Dck_!S+1Z6^JMxh~Djrcn*SYJ4@}ElATQ7FiOFy&O<&X&x!Syg| zO*)3Y`IW7Gipw8c*$50%cj^&hPT;=RMb<1o4Sfl3=-MP@ZVlW<^Z6CD%Qv1Zcfx|0 zs10U&3q;Rvj_b{7(T7C9isdsegh*a-DvukKzEcHLOD`SLQJ6*xaML`g|s?YSBl`z zyuKWuPqo;5pzN}{J@f=!+WLM-$;goLZUUSd4NR7|nK`1eX^Klm{OdCgpS5$Myw`5I z>lP~}o>7Ouh8ov=l#0Nzy9$*@%ew%LKx}Aph{KZzs01CRmgtHERW^xkgeB05ex&pt zCf@GV0vf$q2hulE0Lp^az4gY;(ZcM5*~*{0eqO5u6v#stwg6J}g)1D0MVw?qlH7wi z2kbW)!`rpOq}2ehe9X_&b^4Jj#FJ!i6a+qav#mSN>JEADRC>W5GA}$Qzy0Zr)~`8H z%3r~hWIst?ZMOh&p?x+Q?aUf-x~punMq78jNIlY^uZPAxf$asRlRP(%UU)Tpg0w_j zS<*hh9^YCC4jBC`G(R1eixx6r-@E%{kiY%oL=64hySj3jvY_(#FeNn9PlAq22*aN4^qptZ#yL!H^1KoN&H^y-Ai3ZyzbJw?< z=Hk!Bwqh%wfZ&;T?MobfR>e0cBp=)zW~qFV{R*9@yInc*%t-&qxoj+tp)XJ2ff<@R5h zM<9+2$|T2>=-XzAhQT!C9Lr}wr9>wQ00y|U=dRfg?@1wdvus_S2LlZ^Zg8*x(G*qh zgmVKOHwqX?DekIFS?3J-_q&;y=9fZNz;=)O3frX+JDRSeGc%E+|o>17Zmdv z`fH%=4K{TmwF3-4t5ge3H%L&nMW#9KGxz3&cZ!sYo!#_xiZGbH;O!$Cx@7~d))Rt7 zeMPwndht#}c}Yqvm&veCz?l>Xr7=Yx~7lN>_e;-B}Ac6vQ5xWCg5m 
zHMxBR^)aHJy1d-BtuB*(v=}!=0F0NBu_@nzza>>uL^2&ku`c1y0KG0L0Gc_Fdv_WR zgk6XddB#RchbtoLfm2z~UDMgoPETLm;Jvl!s!%jjPF7i`?WD<%k$e zf9iDyo8?`sUEhT>r?sRO*$e%rco~m58nmyh@4ip|>jD7M=uG3sm-U8`=CVjTl;PgqosT7-1)_p<(;KN(<>h_(FQY zm|L;1zwF>qL(w>w1()R^*Ug+zK%$?_14#7I3CskuztSz(3)d%Yqrjjqv z>Lmsj{a8-0ze=ZX)>lwq6sYhzNog7$h>Uo;Ot6_<)*bZBd|Z_W9$<*bk#z;f(89>Y z`tMa|fDnkh$fsbc1eZXy>>znvb(w2FdB97?txl;pP@9kRR%jt20PFoRAlgVG4L7Do z)~r_14Gz)mqcl%Klqo4Aru5-yjONo!1jf=x{FY{n0D~|8imI&ifxI zu4@8;mbLTORTvV9ey@1(^_m#S`z6;PtxR$6iz<0qe#x>vb%9GNZLTA(VC;-ayc@YF z-#M}VMWs~8x;1jI?>N=(J}xg@00NkPdR-ndRT%}UpqfwI;c0IDp_WS1hZ3Br`et4D zMuoqi6|;MNt*W^KawX4{HdM ziK=YN z0HQap#iCSySS!_0Nvb@;(70a zFp22WKCT&Lhh3(e6?7`2-CRFt>tHZ_Dz?zAZJ56<3T8PtN=~fjU#l|>+%|`e4%0?b zP}is@F&jb_X<(z*BjT)^d2dL$r6$fx*x>p?8EBVNd6gipJ{n^^=YiWZrMo8Y2L_*5RxpHh) z`H1d{=p|eh95rON_a2vBy0@dNk_YF4Jf;)>YSbtXR^QSFcU?nuyjodtk2~7k7Ld9Y z9ebFJGrDLQI6MxiAJ8V*?T@L^=1CbW30Jpvt8$UKBHy~|enC(n`TY23s@7NSSFfU! z*0}{f&CHy=fxO;Y*X$}`Qz24mw`FZ{JouNldk32fr4L%VyuuGY^wZGhmUdi%ZA{yC z>@WZ{-b}B8G;Tv^%&k2B5hxlwz0&n0c!M_cW$(2*JxO+ZM!7D0?V9LK%D{MUVSqjE z8`l~7LrF8QF>tMq?XitIe&%TBP&ilDT1brrQa4|1&DzV=BBqVTEabI(n$2AYv35Ap z<)v0qcX2#@1s)sF$rvd{6Wr7h}I1*jiYwCG}fu>G=G>4PCuXRi67CcwrbrT03ASaP!AY z9Nhyvu5SinHs^ie-KPm1yFht&DZ{V(moU3qtgDk zQRNr2oOwjRDZvgxz#i`(8Edkb<*bqLN3cuh6Q|b8a=!3ho0*6&WxbB}8;z}7rQmP{ z7LA+Qg-6HZH#HL~JbI!>bf#>!R_Ib6(Fo&Ma{`tyzrB0M6E9bZYABa{1SHMOw(amr z6@RXaAhL&B?w7S}=0FoDz>IriL@|2vacaLW;rO`xhvo04C>zyYH9=>}&gXmdMnz-7 zfjPKzLl$fl2g;4hbJw@QH>Djv>T=8_+FBtWcX}}=AZY&med(l?K)zH9)AXGWR!goT zM01|3hVtKCqAB3r6p!uN-BkM<`7?uxGQhRS_%%33T7SS&pAb<||FmY@w{QE`bHTVe zyk0(>3zt1W;pMT3zN&6E;Lz^;z3SvAa|FKr(&~Ckf2>$)SBxZ*wHZe8J&t2L%q+PNb+||EnTz_r08U`$WD2dbjkhAQ#z{GApTF z=(85s5xh}(fm!n)x@?G5(-A74^~M!-Wh-vf~GXJ3o2gtr;($@ zn#<>P%#ykDmgc|64$i#(IQY>_aZLZ_yW`g4=k=4vas={AJ7qhhQ< z-nIPOrNd505p>~9Qr;J|N%hE{2&5%A*~x`IHxQ9mG4s$esP7f8zWs#h%~mc2QTgG? zHRYEn8<1ya^yI}gi2!`XxFD)GO%5Jgfz2dQ!kt@qQ)u7kntncF`Y2XlQn>LJVh6v)`r;&IleLCXas~U2YK9PY%{! 
z)+*}k+~PMkdE?hmW-a|^G3(J$GpCsJ>v7^&?W41+l*}YcH?$_l4 z=GGM%x{6@E(amBjRZPrv&fQPb6@9(DDx{bTBM~>lKRXaW4ltHJczoE5z1TdT_ics- z(spAR8O?Wj?EEaP-9$VSovAhq$k(iab95Pzl5sxN`HzoywZEad)Moc0kK%Y!8qPVS zns*0#BvO+KAC0g6x^ntr{N}B4E3&B7INjKy#p)Rxx4r~B&gi?jqZ8E`vQ)*iixDYo zF|IO*CBg4mB!dd2`pkozXg2k`0@*WU^#0PTL~%3GFLgdD!zZH*1+Kp9$vuNn z*}>q0Lzc;VdqFq1VQZv3Of+-h*@vqe)8m!`6<7hWa-_vp3$1M&By46swP+*dsRZ$& zg~hIGGK5`Oe$J)uvaWSq3Y5At#) zj=+khYr z`~;4_@PZR_VlVxl3*i6LoJ!hp{3-~Z8^7FUu0t2b+txZzJW`&aO?{Dnm0OPu(_|?7-_b$7=@n$S$F^?Q*WKyx% zpOfFtkh*XR?gTKI(Ku<~GB{u{uGd`&v_#MeQhH7iEMWZlU7B12&gH8nUaM!+zC48X z9af;O&uE)GaMniFm{%+T4JwyI6eUIx3xu=)BmB2_w`o6GQ9Uu`fYWeML3R<-x&*(6 zeWq06;~NC8AObx*(^ICb`9aUBj)MbtP+ul&u1Esewoz9#F9)DHY!WlSMoCxyPpMpt z4#hQwAIe$1xm4WPFqb<#WiUfBDOcwFO822I){m@1hk8;T08qS~zz;pwpalJa{&z>* zc*AlF!oIf>{-^fTJ9c}j!6$R*XMgG(7muE9nZ}~#R2?e&lX$c538(mUUmzDn|82lH zb3K_ak=A>Q=!V1tQ)Vq_SxnM9M#v5`4>z7J!t!%oheaU;s2aIruA@l z^b~V+u_GZm(kD%8Q0f8GMd!dH1jdGmMM-GxI$K8ZYc0Apz+#Rr`~!S#IC{W%JNI_i zJKM*c0FklluUd-Tqn*L1g)d2>-_Z^Apf$8W+ADV2bO3Yg26LUgYJ3&=mmXBTeBu$W zy>GRKr^mi$C<5Z1z@-Kg1-8DE8*fMGCuaZ0eC&Rk4pT`Td8)-vhPH})V(|^v^|)uu zW|gi>+c*bnhT^G3q(SX1y7nyu`Py}F)f79+R^g_G3wU$AocyC>hhrX5cRg2})4bkO z-Y5$Et$05d#5o|#z^8u;oLW%rEA4Y%DJd}WD}jW29)SRHVFqz*W_D9{0DoPM-oG%j z#eXuhFFR`)EzUoZ=VOQ*uCDLZc~rjHkd^HL@a2!vG)1Rq@cRnCg~u&?4ZzlYFb>I! 
zwkymPGK0f^dgJrP=G&{sltTnUyjV`6@N%~f5g~7N+}E@}nMaVQhMLj878j4PR|9)Y zggT6Eno}r(pA%G~sT4{e!RtOQ1<}H$#bOYyzpTt%bvK!Z;DE>B3ZZc3MB#AQ@aOoV z_d-(rMRt4F-YG8`{?}kI2Vdi^i17v>`D;BK>z*Dl=9U-Sqk#8|^(=vKH>xyn*kvif zZ%!u#Ti7vtf@|qTQ!{nSVZ9rJdEZC0$_kORI>nHyWx!HMiM~E{HkUEu0u7+4;5v*x z`ucOkgvgOs*)_m$%+SBFE84x`@o>L*BKxq z+prF9>UyNw=R>n7LD5&J=d=^iq7Zw*3o@bCuJ&d2iD~ZTf=?J(5+%y# z@IE`>bk$%}-tmA|0Wh;g0A<^DYB#kAkrS!Byw<|rL~r3zz2|l_VXTLU4OG;Y8cU|A zC|9jGw^^h&T4cw8Qte0CuGHb)y(N^7%SUYK9$QPzOer>V3&exDjVQ@-4%M*b<%MO| z-m*`R3Uh&h*eze@!mn^qs+U8Un5`BBYdLVWnRhKtZ8MI9n;M6%PBv5I*&q3s z+P7x!tJxOK68nTX8WJjyIubSRZ^W929e*;lm5NF;9hX>&H-hxRSOiv%bdYsEfr7vT zKUHbdTJ7MM6T&*Vm$USdz`k;~gn{*+#7`-;R4Q);?|g7$ebvQOPHoR*vPyk#?(#Zs zMISW18@+tS2I{|TxeeWFk_I_7N1kz8+z5ojQK&M*);&5RyM^;wl=ixd#nQpF4A z?%t7cR4%PzuI!P@cT)u7FqUQ*f*HfsC{aKTS9^6F*q;S77C3=lc_mtp&CU6xn!Z_g zr*prvY1tmoAwLsFx{!R{qJu2RxlqQvNbDghsnu;9qPD%&8Mhj#T4vi>l?>sG z3_O9XTNGBFtW$h!w8E~5#tOE2h}}OoQR?(C6Fj~%HH;$r13eaT&KqdsO?_i?VZ(GF0pP34xhE? zJyRznkNO4!uhD%o1`zjMIDLRkW^g%ERb*gQhRo%WLZ}> zwKqPL`BR1YsN?EdMgTS3RBz`O@b~Q>5{T)8W8S`_1NNp#w26^28~b24z|8>ciQJa% zO`^AqExBcKmM9AY7pLSU`(o8b%yXv?efoN<$+M>;RHnQ zsZXzI$)DnzR*n?1Z_=bl!2N3(!dmv8>e*D&JiQ?@%Oa=Gp{JuiKL2d^>g6 zl4rmiakl^c;Hw;f4s;OIi6&Iq1jw43NJ59|JU2J_rz z10<9ghwY2AtRp6he4)zdi34U5I_QswAD^$85bMIY3G6EQdgW` zYJc!0Dmrl?zmaYQXwoS#!Z6WQTsFJ!2 z+*y^#s#MDhQw|j^f#zOxP@foLhk0OBctR|3#XuZShhQmS9vPLJ1xzPB>AD0`*cqwb zAf~(lP}~9dTnM)Z5YYvg>+jCBkBthz2L15c7XKi-nO|Pk)mB}^77qMH00OQKv^hS= zeZ~$X6b-PH609K8o0cbc^N|O}Lug*q72Gusn^r)C`~vPQX`c1oR9o z-~=+-*_kZGVo9HcOmYuUWCTKD3uk(d217gBDw)#?g*R4nBa(Zxli%+4U#P}ym^Cep zfPP#0Dh^d-e+ggj>15YT=BVYw=#Oh2eO@!V7P6a9dL}8o%O^cYVlP4?PS#?bRlY)U zM&_>DvH63800~p+bLBfQAT9hL2~}sVq}ZJdOL?SwABaiKp!VX*tU+adU2bCY<4<=5 zup5(H`b)thQc&XcAxdU>nEy7VI#YU~cVGs8{lQI72(0K~^KH8Qw%tp(QW=o<(>svW z#BfNw2^eLOWR8$sCKceksG0nbUXNpayoK0__8a>nle@-pXFYXjb67gysrFA(;OXB}p}2E+*b3@kE4XH*#W<|&Dbm3+goNqn4@0hZY&~8X;lD+PP8XG z#bd*X3QiQDQsLq7alT_8g;PEe53}xDlp`!*4eRQ=)6BhY0SACSl<2m1wR}JvE+SJF 
zT?P~>2@d3P!78mTrWFx;V?NAH%MGjzh^p;+W5=?5xw74@p2`j#l;?(QH_`29THIYZZOLZ7|-J zgDcFx7o{4>! zf7tx`z=x5%n*0Y*8X9`PE^}hktx^H$R=4Fc&BuoQxxbN8?$BrcG*mOr$(X3YW&_En z0MXHW`?Gcez=7#L0S6qVt`ZSlwWYbxA*DcLss>HAjL(JCu)Am6OopO3hz4$gH58}~ z320&}*qG8iD-9Li4$Tclw+ulwT!n!R1d>fFZE`7=tdvwHxRqb3UK-FSXn623*H`(&(BVTq-d-<0zymw08bk7(Op1BV>;>AuKZb>e+AwO09S$`(?GH6r z*wHE;nXQBi6{=dbLvv1#(S%qGazEI%oxzTttgSApdR1L;;oAvnJp95TPQGA)Aw-Zi-$9|VwqAcl1vaX1Z{RDC3U{b=5smS1dR9!@(~2GmFM+2UG+;Tn!-cFJ z*?3p?2E`-H!JF^=#-pQj8rkJn=K;kq3SfLbo-$e%kGuIc+y44TyV6u*Cf>)}c=kcn ztE{yEP)YXt8Qc`qLgX4$7+tqqN0&;1%wU_ic(0s-G`PQ*NnOb}5k`_uV|h4n!|cs;0G!rwl^!jVC~rClY&X+ZE!mMm5v1 zC+g2z+Uk6PzZx;6n7Or$V&vG*nYrtLr!FkXwX8T@c|ggb-9?rj!$wtXgbY`(v*-rO z$}cJVEU5KoCGj1Z9WH4Q9pQF%#J{iFaWtqCDR8B^l<1GS;n{@0-{_f^XKTiqf+I>7 z9tZoizyrlj0Rf_C28qSHliw4&zA#1w@u!&~30JJ(o+p_9rL{q9uMUTtDSJX^$8x?( z5ekQ^p6Ix-Je(1UtV~xFvY2B+1h3)Cdom5tr4M4$EfXCn@MqI~tIPA+Px5H{dx{Fq zrx@b`a4Za_aZMZWE7gpzlZsjjQf>X2EP@yu)}21G#0e?At^ebMK}bLvJTMW6t{Vvi zGDVN+qqRA$kqO}fFKMzjI<%Pg_ zJekV}8-ebM_!R6q0zA%MY`sf&=^_@#Kx}U}Fn)%BTZx`u>$YY1`C>DLoOP(p{Os%g zmg~;)n|@#+XBLCJHJmf}(kiB8*#B|M{-4Isy?;14lg|GaH`2e~(?E6rWv>u+ofo!H zzPC`mhTkKyezfdHAy%c==xQu_{U+}$l{aLfiE`tdrF$1p#eS~=HgDD@BX%v^5>vpU zpUf#T-9nn0?));g(TJGQhR#NcVeEw9<`AzfBAtGRi6k2XFx*&M|$tW=)45s}&J%37GJp9uRC%ni&WW`IYn z?Bu`I+~DgXc&9H0-V?#^T2d3t&el~X@Aj|)499m`5XAOK<-%W@N}55yk=uRu>%Z09 zcw-C2?5#m9hb;UI7Ftr>JZ)N;-DDVbI#GGOBXjGFGhT+uxisdMo=aaBG=a-9zBMG^ zb)tdL+SLFo*ivxGkmEOiK=fu*tB;c@m{J~m84j>K{$N9fqn?`T6;JWz*YZ%5JVz;~ zhiB5!XbIv;Rzf;a$|eyjyJyZqt-sx)W?~wli|ah$Yj#LAOV#V@zH_o~f~M;DJOm-h zkeuD5Wa7@cy*(D_K-~%f4%;OAE(Ggj&el-1XcwBnM z05Ufyz%AcCipR@veS(>8t^gAEkV^G(hM=$pQ{1z=(~ zdc4ASuD12+{f?6JH-Ryi;tAZl8ke!VVT?q)fCI3!Dac%+3p|xmiK6n6E8X&=8Qwc} zlyJW|0&<;uV#9}DDbzrz+~h(7lQ*m%3!3JMcxrtJGgGrnxTOE%cW{(_h#a=kQ9F>w z1tykSNg`zpkQ2*SWp}4~8g9okm0-*;>|n^lp>mD+pD(yduuyqJ(FvVF$#_iJ;HXKEq2f3-56@_V&V#qsri};e`_zM+Ojh#3tjA zZ(L!Sf91CsqS0=F%$yi+nOj#z<`!QWrtc#o1XQMbl!nWn43*>5bgigS+&GNP`L&kK zm&ZFL|I5aEB4*XF5*{zV{xPYvs^c&on3f*vrqjkBpg7n$6yAX9dQWp!xtR50oD=2c 
zXzirFgqG;1GP)Mi`G;IE>}FEvFZJ8&`{k6R& z-ME}*AfDD(G~XiJ?@q$V567?{P&JZbcm7!0Y<{wD3|iw6IJg#O?!Bx*{4njF+qlp3 z!rg79ot=V;f$RZosr)ySzOH<+i`G7^4jt`T?`^lgzPW_Y=&VltEa+X6F~TRdKI*r` zV^lLXfEk!QB-TVRz0sd89^s@dRh!s9^2{Q=q8!$Bq%&l_zV4e;(^Q&M4AIu`BzJu7s;#7gh`aDjgH zb0;(6**jlU1r(ksVi+LG+JR4axK%vTSX6?LAsQ-c$#ag?oIS*$UR>#*_q<aUDj$&7FQ02%^*W=*h)F1+9^53<}R_g1zfp2vd`uJN9dD3YuCep@i$VW zzFNCDaL_9~b4d;Bie$VfWsF92S9to5Os+K&oT`XbuQvX{MU@jqdOhK1B}ZnleC;NSGHcf{x9l1z1y%-W$Mp6=hDqm| z!OdXytPmrT{Cc$BtGui%SnR^5ZV_Nl-@gv8O87>NhLp^7^PLZ12F#1!zD7fk#vWRW z@M2>Q*mj@2)ukg(7aQbmbF7MuMzyS|hcj$*Ae6aA3@YZD6*)C0GkJc+5#okCjyZT~WrH;3-KGkgNcr0AV{4A!AAv#oDg>F6E>?p@sJ``J}W zZ*4R<=?z<4$lu)6W&??SPYUEu_X=0-tY%&E{ed~2}lIX~bz>S+Jx2AG1Y>x#$*+Q&x#cRJyz%I}{j!T@f(F?d!|DO#ts7Qq4^* zn5_#{knHuog!+Ua=?{ec5KK-fsz9VO%vR!Jpf;(hNOltb*E6;~(CWo46Zd+hf361z zmslZW2=9LGym6a01%hBAN`Mcuj+ac3xJ;p9{xNw+f+D16=Q+L;5Dx*!$#QPO!73U`4V@s&pb{~ zhDz7HH!(^nr`V9!z?j9K_jM83#3AK77=8SN$do!eCu?My{G(3+Ahb@;K3aI>KNc!i!YCPN6u24B z*3Y3IKHI$v-e6EDqQ^YV36j)x^A`HMxx;O1jqWq-!P!sn<=qN{Oux_S!w$!K=`O;fT3at5Ae53wLUL`ZbMq_kk4jt*FF?o(=72#Sc^C_U|b7y!wnmaoNJ#5~tj)Jkn@=+Ksi z0ApCO%P~qEiKif1M)Slsz#IP}&%TPV{D6p{9lnS#Cx};?V$XBB+u|roVL7n`-l=RH zp5IC5!EP6oUzZx}Pgi%W5xCbxv_M>u`kBUwCdgz}a>Szs{Giy6K)nXw-DVh%O_kI7 zCJ{OlDOK5HCRPfK6z8{2StkT^N(=mA?KxFh*8K;}n+l59XwPwya* zD*bsVen`;rWbQ~Zxhpr+vU*FJ?xK3z{Da&MQMo=2|M*g>D@Zr2$4L*=F|+}BL@3>4 z=v*6dy;>1|kQ)N2mr+`(2-qmM8rz)olzJYd`rPcZ(hgCC^ox2p2J>DT{q{Cj#3bkW zpUt+bvpw$(fh+jM0Yg!o0cIPA?Q(!iSp4vhxh}%l%EvW!p;R>OGNGQL_-+a`l zvs(DzMhEGLP<f+q;bdXWCv1fk%9z##3tJya4*bID^JoXpIfIA z;9pkPm!ee2nZE8da68ft2OqF7rtHjG$d8JO8edEuy$GB?s1ClDKTioEDea!O+wt`^ z!IYcZGGqDc1>_V0jf#D^?~T}&X7KG$UFz}Dp(63V`}e)X--|v8;n}g+daDTYc?eVS zd73SP@Fk37-#}Np3c2=6s-C=%oNcJCT*4!|NaRu?0#+tO&#XMQanX_Y%5 z9i_tEbDxLPF8vp1@G1QS)sx+WXZ9rvz5Y-2`P^LJvoHk-TGDxU=nB)qkwCR=qDQC z>j2Q6i2J^mNdiIaX_&>5PUcH?`iNIXayFUfTPu4YQXX$tKMew!L~K4bue)rebaS{= zS2Z!ZE-Y(P`d>e)s7=1khyapR(lmUx=)|llL79E8*4)X|ZNA_F&1*$jHtg)I*t_=Za_)%P0LyxkMKt*1oK`@DZ&gD3ZZ 
z9X&@0Qol==Zd|~@Gn2Ne-g7P5HbsFm>g4E$>s&R(f1UlZc_1{a6RUrc^DrU5RP-E> z;iW@aDQw<8aA7ktMXu1Isd%hRS1Pfqglo}cZ1Xrx!5s~5Tg^IopLUhYdPhHoDZrXg z!6P-WhBifZUriy`W>u?O-!Y<7xXI@)?IIo^gj<>vpC(VUO5vEG--|q+pAc&GCxy;X zkX%!_*!TX&0=HTp4zl}@yDqpZ+*tm?>Zne6-ZmeMOln2?=r?jZonPJhM8WODN~pBA z^0h5kF3N^)8~!!7@^$@j65nxxTh}8{`aouv*KJ;D&RuvVrG74C^dj>1w^=mWJF7O^ zYrC#Vndlc&i}(mIKBqEV|K%OB_aM424F+nEvO9$A0VXsq6d>yZJHT({XzZ9ED~nGL z-D75OG(7jQYFBu!)9eSg+d&ALpA~U#d4$5cuLMxSKaq|?^wY*Krc<-oSIQ!?2NxyJJEJ_T?^2(;6_u3hV&0`dl3QluRn zEdGcnxfk4XilGAv-U!bdk8X9GH5<1+MRu8Qd`SA{|AYSU((pNL;4pK%yhRb~&=Ui{ zhO0TQ+HFo)8!PVcI&=wHK^R0nn8vkaKd_2r6#R$`#QwWQJDQlx?(||NCby~LQ;0F( ziZ=YOp2R9zx4Gd~Pnb+tjh*0`gabx zOI3(`XFQ*da1m)$d00im?E*iB1RxCM^n6;g%uU8|=+Nz0}igVepfUD_mjDAFJ zZyJ<@O?{}HoJ;pg>N=l1?l&`>iDcGvDZ1J@k$OBh`oV=Pi_F!O?aegi`hXL=YsYdv z<}1z_F=%k7Llrxf*~4ual+K|zu=qtpt3fYR=Bjh$p&hTBuJvCC$H$EYyzl=#J&W;r zHM`2EK`5$5=p;8yS}U2UYc=hDJk|P+8qr9Q<6c;QyMJiQ&`9wX$8T*o4G5!?ES(nQ z0;Fl`c}Y&=qm;a4jU7MUv50ttOIW7yqXMAg5}MK&O5`Kgo=Vc_8mvR;B{W9t zQ7)FQ>odQ|G}IK z4gFK_!fRq6n{O8r8JPXMwUHQ|)bl$pF_)78p`RSok;>D%n!Rz^cqhKb%Z5p$-5){I zPwZCsB$H=T=GWAieT-^I0CL)$~$ z=d}y{t#48H@B2EH)0{;4+H?BxQy>+<&2O8yr?qd_xFYf+ z#0$6d_(2kV4nYzRvs_}kmnr6M^Ziqh^&h2Zg=vSN+s8L#kizeMJcLRIg+nRi{b&sM}nn1aq5a@vYz1@!>8}KEnJdC zMD3qYP-TaB9R&_WifwKi3W|n%uB$~Uu%9`$|7(NgXcrzCBdnwx);nI<)RBo6Cqfr5 z%Ck=$(rsz%xqnfp=u+#sClXr5K`w-D!nNkDUwwm#VxWBO#gUFK=C4Oj3V_-cXg%SL zwqN!m!;n;$?_(8(23`jL7w9nJ`L1kh&Q^iXWV~QeK!}K zTs~rWq+Lo&Q`C3XnLN=+kSM>l@(osDPUs+{U*p+ZCSp+oD_wn7mz~J2HhoNGqq;k* zU))myc5Px%o{V3$J;1qxc4|inac+7x9C6Ui8e{fx6Gu5UhU>~D_ia*Oc6Q2DMs=Gk zN(5TY7+QB4fVx1kO9MAF1T?d))}~BLa0geEUE4vcRhCC@^6Y!5Yd2P_q=d?lpak0r zZEZH0xvcSqFZ%@0^KEZO3?04+?Ik{DKafU958?8cLkHST7b_`NhHO3d@xa|1+d@FIAHGmdliiCmynV7jTtHIlrwbDbZ=Nc;EiaPp}r>C8vS2`~q$_ccNFHY0w&~Dl@AWYOD2Z z({A0kzCf$LFL(F|Yqorhe&}|DiWN-Lamw(-DLQb>pC%BJ?Z9ZG-P)=A>3xw9WLeA! 
zu-~;8cGv#y`Z%FyveOL~9BKYY%{w7DMxiI@cU5#dl6^^e@&}@uc}5B$Xvr;#^7Te> za}Ekhd$DLU@x$MMjqJ zdw=T4?LXkmijpc&;g2IC_eZf6BUS8{_>)U|!yiOLK5_Ah^kRyy^W?@DetX5%a|KfV zu0r&}ZQK5uS5x;B^^PWdl-RuhAKBmNkAiR%pT(RCEdF!9lwB#KN-Bp7y=|ckX6`y# z(enz*aUkN_V22w$5J3APXpReflL)GG!8i zMztqSVuxB&S}{G$7d=8$e^rC>Z!9^4MDDr9;j1gH3C?qAj>Gef*t16ih1g>XPgX-u zR_{khw2WVW>3#7h&NIt}BX%OdRzLOk-FDZSuGpebG1YEy{GWS@Vc$>F!ZXK}{GxnJ zxa*O;Swn?#xfU|~e21~rIU)f{3}y`%{E`(X(H`ponDvT>aqFr!*O`fn*%w@;M2XsAKKj_%^7{B|Y zU-3mZclY`?Z&i^`AIGq(-=bBB z`sP)(D#zrVoO6zsx6Kyk6FY|K(KfhhHN=l**XF_6_j(%?_YPu~l#X0BH<>uw>GhoA#5sp8nz1pZU1T?koReCq{NON_%hK z?RfPOogvMovltE8_j1S}JK?qCPcrQlk3l_!d486&jqFgJ%`Dod1JaL_(*PJiYInuA z1=bJjF{In0;-9eGxjROmY&!=#o5F~ZDcg$YNXIO01>v6@aWN9M&f{$=mT1*|wE3Y; z5Qtkx(`*pAJJtD=bt+!2b<_1m%zNwtqSmE>?A_JK1d4l@c}FE(;xyH!(@NxrZ-8>1 ziflhKKJtboBCljDg!P{?=oyr>lZus-=~05#+SPHgO*|KRui9p@3vPam%A?bP5Y&w;S)5|edvg-<=rpM`ll1D=F!u^Yc(g+dKC>?QxWs0Ib*31hPbM+2fX(|7>V10l;wXQXz z?_Qor51CQk8o#>a598M8t#egmRad|1`sSGh+NIgv z8HJOwv+>q2lS*FGUF-!2g^#p`Xv`hqn1pX}5nKC78{$kWzul~KuKV112j=76^FFN| zJ7<3V=@i>(-nahAIa`^;7wYSF`mTJY7jHBt)`jbeoPT01U^z0r7L)$1NtuL*pmjGQ zw5fMEr@PY3LbZT#BG@i_MJ0lyWnj&2D3GUt7R_FzKpiucp}uL`lY#lip7Ti-nX|M-Cp@*Nw;8!4~BdEh=JqAxrS z_xLicV)!N>D8Es8T7OJUUs;EO{xlhc4>N^EelmUjd;uPJ_+;N=oHyUi%XnI~b#uIbyC`3w9vd)uU%Sg`@J~uwP1{*^^wnClp7>zBGQd1+7 z_NYDk8lDL*Q05NC2nF_}|JbYwj%5N8E%Tm&&KqCdP z_nEJ6&rnk+4>S`>#>}@fi1*HVWfu#bJJaTVxxU$7 zqt<&*zn<*ck3L2#`NcdVT}6tl13QA3tX0G2`?#KZ$SeOI3yG;Kberae&Hdcy2@v8>Vj zfGAqJVhbySN5fIF11AJttDnyJAgN_+9%F1Y{EO-fSTm$p*bC_K zw@E*1b8yuZ<|@T7!ex6qFWxg7wQ@Z0T);xsrt-b`*dkIseK0zue58@`@Wc(puVhs# z(2+{`RW&7>G&Pl0%Nlly(oMs{Ba#LCTUM8nP9OSWS~p@1T9WpSwgdBwEmfnzcceHg zPWS#IO4gTrcV9g3PH>#BhgT}DXRb%K=Rd|-hP`ffYv~_S?SPU#dt=W=0{w@Y%GruC zJ-{b8`B^$=NaCiSbZ=C1?m}1Nz;P1`$ghf?zFw^)V9QBp#N*MWF$=zVQNv$Su0_+q z;w`Km#$P&Z!>vDDaLjKH;4RRXyeD|0fz%{>*OpLr_Zb5VG`dzJW_2y3_TadIZzH4s$Pbd#iQG zr?)9AOOBv0Gea6ZBmL{rEj_2>OfK`+3&+^D^eO|^Gi5_FL( zv#)>1p9KuSe|=-2EU2;*5)F7Z=~>wl<3vH(_ISz+RrD 
z@WjNk=pD#^@LNN#;7Q1lDD%a_SC$D~z@;bT?dd<`!KBZZ^vU>mZj;KJ+@Es_BQzv~b<} zd#*<~bzf6(8gEm2?6ze=XqR`uDA!PAP=6yebOXYuxaOC{N|c-;h-_4{9IJoy_?tb( z;lA@f-yVm=R`wM%3n@&(-QeH4T6ZcdnFE@X;IU~3SUT}wf0O=Y@|yzEK4Sg!=w+gT zg>&oXD9r1!=l^8O`S{8^^#ZM|MNbIXse-xIj&JA9b7qxvRmcGKWb~Qt%=~XRt_j@+ z25fu&vmMk?@ZRnwVUQ$aUG=xHM8lGd8+-3`*|!g1ln<*py7$}5jzbUruGq^Cr~ZRQ zPn&T^Ph%LVd+1Pl;O3wNh!61aX}V&2hfqbjH*VywdcZz6?T819_-(u$V<(Pk{^ZF! z)M4q|iV8g-5mNzM*U(rZ>@~hmu-~5TLi(orm9*k{v3K-uH=4=JLKUcTW&QNF^UWF);j%>kP&4 z$n?K+o{)Ecghqvi%#I?<&&h_E#f$4lU0gqWfKr^b^N}mstO(*>GP&V}MtM*#r~BUC zU9RlLwW)WNX8}zCjUrh-c$JXo6y=Ck58YC}1OD;>p|74ESRQAs0o21&g#Hyj3LEEe zcGvkq@J}!IvZVIOU|dc>j>|i3G6kFZfxp)*)m2FIqn>v8+2->UtO0788!D;`ttIiI z-D;J=K3CsjD({as1<-4ozb5}r3o+alnDWr6ONlWtTl75ZuJH0<`sk%l=?x*&jo`1r zU6dx<$F{`jT%HV`(*~E9!dN3WAmFys2ma1^gLWDU>W_*8VD1=meb56`uBW2k-&ss7 zyT|zc-Gx(SaR$Q`KnKSYt;iCWNT%gBpI<0PjOlg-#Q z;KZQ<_Fv+H;D$Q>ujB2mBptUsw>L_k?bGmusJKY8X(|5$`~Pl^oDHP)o&g*f3gKRv zJzg8w|2;3=ETzw{o=lnUp1gM>t{Cz6Hc(e^ZjthLv3-5pZ{M5VUliTm-j2CZcgE^) z{ms4h_bmf`SkwIAd7}r-E}80q?N$*<_B<2}rWsA2PRDXX)ZA_Pj*CVAZO^CC@xLI& z|JtDE{`Wif|344RjNrequ?&8a`hYA9_NW-x9Rl2Uy)0o;YBd7d#85>AuKu+g$*99t z5Z>Td_)9EHRL@=!yU+g$p7{IZ=Dj`X2kq-o|K*MRpGIolq;AG4l`oa*sf|mENB+LX znfcL*rh`EnM!hg?S!pPySVWs?wl~mjU}?cs7jmCwM|g0AVwb`zf?*eML4UJ_AL<^3 z+BUl7;;FIyyH)?hfm%8FMbVZ;&4H_oH?fZ6UXJ#e$l`y1M)0 zDT;#=gav@h%KVQwN{ufWDZv=I_Lv7VbzC|NGSF8bQ4WmsX8eZly}fr1aB~JrJ_yBQ zP!Tni_wqI7cl4wvm4iiOanlhr@(w0U=8%m_y%~dudHaN35&|RZP|-zZP?YFUMRF`- z2&+)GQja!T8(xzrQ}%tLFmEJ3_~?ZW@gRAj`$ViQtl_&cr(7 zB=XLbJ9cx$b{jJ+lA3qh3F@I|R`W&Q!j2h&wA~IXJ;$x2eGeQV(sQpId&EkjTuVxL z7^kbDLDrH30rO+bN1O^7nu%%1rK**kWf#81$;#R?-Hz4F!l`z2dGhpR>Pz{Nii*;{ z81~88k4oQKEK}tx*J&|Nsdh!br&CH&Glq<1)%=j^r1jDS;$us#7>{3HoEC7`5a!J< z38Nc7z15!2%n6geiU53Fp*FNb5QS`WcY4h>q-_#79v+ke#VSf|)3FV!0 z>2V}o4SZ83dK9W00)X}u{5#w3A-$zjYU|fMom|1eQFUX<0fk0Z=|B7xB&}<;B+Z6Oe7Krf7@$&Y2o4taw^3yH8**ESu^aa^Zz`e(|r z7&h*XAJoIPvI|=4(>*!AU|5&08;f2mYRH6UI~NZwj4D2bZ|NN`A5E`Oo+tp>Mg*cW zvqCZN7ySE;%XyCAO-2JH%H-GBxe8z?vqtT&68j^2{EA&@h`CgGARLSrRUJpZTAqt@ 
zM=x{;02=BBLzgwmvryons3y$(6OUvm)B3fX@SSdVQ@g~x-$7ts0>9{hZmcb}{h;Lg z)$a^HVQrluo)+jjx?W6bLa*^>`b)~;6KY&iYPR0EO5l%h`1*Q10`N z(W0(^9yauu29)_`-~gxdGuR`j&WM>LGN{`MD1~_|b#?mnT=KJqshE`ycFF*}cxo3Y z9@)RC-sMqg0}wWxOLo}3cq0f4;140K?&t{Lb?R0tK!p2(w1$+ic1_n!Ve6$U`kOSY zUH%IJ`R)0oleuP2{&=dJRpDD;OxY6iy7P--x-ax$e5-#aHefs3SnRX>#(FsX&S;a? zBY(~>TgVncsxUvBZZ4(~ZW{XJPV-2RXE^n9g`|L94`idtR?Tr`FkvX;x8fI;bzmYS z>tA98)!YN5FXc=I!+%24E24l_bnM!6MiY58_QLc#cy-5}k%OyhtM{jgCP;QlxHkg2 znql-QmhSqt*u*PFnlqlG({l>$-+ks!MlCNrY>;%OU&nY}RH)m8+lj^Eb7tk6k%-3# zrL3s(9{068oAzy@CMC_MdUlnl{}uX{w@g2c7+S_NU%7IKwYT3L-Q#lDxRdp%OE8oA z78}A>z3nTvyoWFu)hPz{p@f6^M^^pZk+wFL@K|8{AQRnNp)qtsDsh+PD^Mj&)Ur$K z?FIu))84J#qRDFHj^KCoR4K9p@2(P6ptmn6FrM6l05I*g z9?2%|#P;7AkoQ{Ahq!yX+AWRhXhP}ze{gZ4>%fa0&y&X`ZnrFX;wT)9!RVd>aO0)s)aX7xP~=abS?7QNLmKROEKOPs1u zaZWADRkJFce)cxYI(%>}9qU~>ZE99t)h;1FGOcS z8h|naq|1r9Q4z~h9Z2G!kP>e$MBtejL{IifY>@En*#L_)edQfu+lR`Py&2Bin_;Yn zsv6e%%Pg1Be z)`GKM&$P-}HIgDcOs77*nR^5HR7Av# z*M`T;er-F_Cnl3znC?W8aT=uS_;{@K`goxCMM&*Zmt7>eFOLoRLzp zjk;=jQOv1K%8;%GitgxIWb_zHEGeC!N0oU#PqV+JXuLC$d%IJC_RtwrSl`sYrDT#M z`#5O|Wt8ARF-vN)`7%MgM-LPpm_wBXOai?EsK0KWopz<`)Eb6)cz6m0Tw7;whterY zU~d%h=XfCqf*YItG*`XL;3tdlkwT2!W*Z3%jqApSV|@?UW22S>B8z8R4Ot1ZHY8z%{D|o=e64c#!PFKA{d|7^RRH~7 zrruYlT{V9*Q#fKULaFc#ay6^Yx7V}{O^;zQK!3WDBMrHB?hIc@$@`xv{#1Xh^Ifot zVu4@pW-DR4&y%KOXYx4S`UCm-O=U2sfJK=1f$2JL1`KXuV>=ZfK$*mvg{~IzHpBRqFN!kWo5Q)yo()CB^_;a$pM;F&jy3yMv1Y6T-8v(i89CS=zGe^RN7d6;@yz<0F= zSX)4l1wy2T9Nv^N0hwugC3gR=Lu<{GWrIdV5tZ`6Sry>)FB$Z5K(rjM(N}byhiR6s z*#cHgfd&r^n>p2lYq8ff>FfzdcNxV`epSYO~}YUa298gBFq?%J2YkIQcda%a=f73)rQcX#tJ^y*1mrrErYkxe@@NAT%sDJP z0Q{j))>stL*iBT}iWC$e{BWo3-=51HDD#+|UQX#QM&@Tl*g+O@@maCa4=KlTVquz(o*nNT% zud0c&Kl&2%yZfI%7h1g*?{Igob)Cls Lk18M9zy99<5dF(@ literal 0 HcmV?d00001 diff --git a/dev/sas_token.md b/dev/sas_token.md new file mode 100644 index 00000000..48c23148 --- /dev/null +++ b/dev/sas_token.md @@ -0,0 +1,3 @@ +# SAS token settings + +![Alt text](image.png) diff --git a/notebooks/azure-blob.ipynb 
b/notebooks/azure-blob.ipynb index 18941500..ab911574 100644 --- a/notebooks/azure-blob.ipynb +++ b/notebooks/azure-blob.ipynb @@ -47,9 +47,10 @@ "api_version = \"2023-08-03\"\n", "request_time = datetime.datetime.utcnow().strftime(\"%a, %d %b %Y %H:%M:%S GMT\")\n", "\n", - "folderName = \"logging\"\n", + "containerName = \"ucsd-pilot\"\n", "\n", - "url = f\"https://{storage_account_name}.dfs.core.windows.net/{folderName}?recursive=true&resource=filesystem&{storage_account_sas_token}\"\n", + "url = f\"https://{storage_account_name}.dfs.core.windows.net/{containerName}?directory=Cirrus/4001&recursive=false&resource=filesystem&{storage_account_sas_token}\"\n", + "# url = f\"https://{storage_account_name}.dfs.core.windows.net/{folderName}?recursive=false&resource=filesystem&prefix=CGM%2F&delimiter=%2F&{storage_account_sas_token}\"\n", "\n", "headers = {\n", " \"x-ms-date\": request_time,\n", @@ -57,8 +58,8 @@ "}\n", "\n", "response = requests.get(url, headers=headers)\n", - "print(response.text)\n", - "# pprint(response.json())" + "# print(response.text)\n", + "pprint(response.json())" ] }, { From 4d3d17500cadb50580c871804275eeb4bec03b58 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 31 Oct 2023 17:13:48 -0700 Subject: [PATCH 324/505] =?UTF-8?q?=E2=9C=A8=20feat:=20add=20files=20endpo?= =?UTF-8?q?int?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/file.py | 3 +-- poetry.lock | 38 +++++++++++++------------------------- pyproject.toml | 3 +++ 3 files changed, 17 insertions(+), 27 deletions(-) diff --git a/apis/file.py b/apis/file.py index 68da6aab..8b35d413 100644 --- a/apis/file.py +++ b/apis/file.py @@ -2,7 +2,6 @@ import datetime import importlib import os - from urllib.parse import quote import requests @@ -23,7 +22,7 @@ class Files(Resource): @api.param("path", "The folder path on the file system") @api.response(200, "Success") @api.response(400, "Validation Error") - def get(self, study_id): + def 
get(self, study_id): # pylint: disable=unused-argument """Return a list of all files for a study""" # todo: anticipating that each study will have a folder in the storage account diff --git a/poetry.lock b/poetry.lock index 5b50c575..a31ecd09 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1040,7 +1040,6 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -1049,7 +1048,6 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = 
"greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -1079,7 +1077,6 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -1088,7 +1085,6 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, 
- {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -1801,16 +1797,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2749,7 +2735,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2757,15 +2742,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2782,7 +2760,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2790,7 +2767,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3383,6 +3359,18 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, + {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, +] + [[package]] name = "types-requests" version = "2.31.0.2" @@ -3702,4 +3690,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "dd13076e47cb3d15d794c986e0cb6d54a72f36accdf6a5441557da63171cfbb1" +content-hash = "e3e1b2d0645e5cd7ad0281091d65e85b411eab0ddd7c475762e908bf9c10bdb4" diff --git a/pyproject.toml b/pyproject.toml index ca68dfba..2dee3a88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,6 +89,9 @@ poethepoet = "^0.20.0" jupyter = "^1.0.0" flake8 = "^6.0.0" +# Types +types-python-dateutil = "^2.8.19.14" + # Environment [tool.poe.tasks] From 
21eb53aaedc55ee18ec1b16085855a757ece7864 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 31 Oct 2023 17:28:12 -0700 Subject: [PATCH 325/505] =?UTF-8?q?=E2=9C=A8=20feat:=20add=20id=20to=20fil?= =?UTF-8?q?es?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/file.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/apis/file.py b/apis/file.py index 8b35d413..efe890b8 100644 --- a/apis/file.py +++ b/apis/file.py @@ -2,6 +2,7 @@ import datetime import importlib import os +import uuid from urllib.parse import quote import requests @@ -87,10 +88,11 @@ def get(self, study_id): # pylint: disable=unused-argument for file in response_json["paths"]: data = { - "contentLength": file["contentLength"], - "creationTime": file["creationTime"], + "id": str(uuid.uuid4()), + "content_length": file["contentLength"], + "created_at": file["creationTime"], "name": file["name"], - "isDirectory": bool("isDirectory" in file and file["isDirectory"]), + "is_directory": bool("isDirectory" in file and file["isDirectory"]), } # convert lastModified to unix timestamp @@ -100,7 +102,7 @@ def get(self, study_id): # pylint: disable=unused-argument date_string, "%a, %d %b %Y %H:%M:%S %Z" ) utc_timestamp = date_object.replace(tzinfo=tz.tzutc()).timestamp() - data["lastModified"] = utc_timestamp + data["updated_on"] = utc_timestamp paths.append(data) From 9c5ae268b1ebe6465d045a09e1fe6bc5eae10ba5 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 31 Oct 2023 18:12:12 -0700 Subject: [PATCH 326/505] =?UTF-8?q?=F0=9F=90=9Bfix:=20ignore=20created=20t?= =?UTF-8?q?ime?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/file.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/apis/file.py b/apis/file.py index efe890b8..27434bcd 100644 --- a/apis/file.py +++ b/apis/file.py @@ -1,12 +1,11 @@ """APIs for study files""" -import 
datetime import importlib import os import uuid +from datetime import datetime, timezone from urllib.parse import quote import requests -from dateutil import tz from flask_restx import Namespace, Resource, reqparse api = Namespace("File", description="File operations", path="/") @@ -47,9 +46,7 @@ def get(self, study_id): # pylint: disable=unused-argument storage_account_name = config.FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME storage_account_sas_token = config.FAIRHUB_AZURE_READ_SAS_TOKEN - request_time = datetime.datetime.now(datetime.timezone.utc).strftime( - "%a, %d %b %Y %H:%M:%S GMT" - ) + request_time = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT") container = "pooled-data-pilot" # todo: this should be the study id @@ -90,19 +87,20 @@ def get(self, study_id): # pylint: disable=unused-argument data = { "id": str(uuid.uuid4()), "content_length": file["contentLength"], - "created_at": file["creationTime"], + # "created_at": file["creationTime"], "name": file["name"], "is_directory": bool("isDirectory" in file and file["isDirectory"]), + "last_modified": file["lastModified"], } # convert lastModified to unix timestamp if "lastModified" in file: date_string = file["lastModified"] - date_object = datetime.datetime.strptime( + date_object = datetime.strptime( date_string, "%a, %d %b %Y %H:%M:%S %Z" ) - utc_timestamp = date_object.replace(tzinfo=tz.tzutc()).timestamp() - data["updated_on"] = utc_timestamp + + data["updated_on"] = int(date_object.timestamp()) paths.append(data) From 4530e5e449a57f068dccee4111b7837fb7b94146 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 1 Nov 2023 16:09:19 -0700 Subject: [PATCH 327/505] =?UTF-8?q?=E2=9C=A8=20feat:=20add=20files=20endpo?= =?UTF-8?q?int=20(#18)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🧐 wip: test blob access * chore: remove output * 🚑 fix: use a sas token * feat: add api endpoint to request files from Azure * ✨ feat: add files endpoint * ✨ 
feat: add id to files * 🐛fix: ignore created time --- .flake8 | 2 +- .markdownlint.json | 1 + apis/__init__.py | 6 +- apis/file.py | 110 +++++++++++++++++++++++++ apis/study.py | 27 +++++-- config.py | 3 + dev/image.png | Bin 0 -> 52415 bytes dev/sas_token.md | 3 + notebooks/azure-blob.ipynb | 162 +++++++++++++++++++++++++++++++++++++ poetry.lock | 24 +++--- pyproject.toml | 3 + 11 files changed, 323 insertions(+), 18 deletions(-) create mode 100644 apis/file.py create mode 100644 dev/image.png create mode 100644 dev/sas_token.md create mode 100644 notebooks/azure-blob.ipynb diff --git a/.flake8 b/.flake8 index 1f518c23..2f20d8ee 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,3 @@ [flake8] max-line-length = 120 -ignore= W293 \ No newline at end of file +ignore= W293,W503 \ No newline at end of file diff --git a/.markdownlint.json b/.markdownlint.json index 1d6b0e1f..bd7eaeff 100644 --- a/.markdownlint.json +++ b/.markdownlint.json @@ -4,6 +4,7 @@ "MD024": { "siblings_only": true }, + "MD033": false, "MD036": false, "MD046": false } diff --git a/apis/__init__.py b/apis/__init__.py index 26c8f39e..bfb5d0c0 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -22,6 +22,7 @@ from .dataset_metadata.dataset_rights import api as rights from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_title import api as title +from .file import api as file_api from .participant import api as participants_api from .study import api as study_api from .study_metadata.study_arm import api as arm @@ -98,7 +99,9 @@ @api.route("/echo", endpoint="echo") -class HelloWorld(Resource): +class HelloEverynyan(Resource): + """Test if the server is active""" + @api.response(200, "Success") @api.response(400, "Validation Error") def get(self): @@ -108,6 +111,7 @@ def get(self): api.add_namespace(study_api) +api.add_namespace(file_api) api.add_namespace(dataset_api) api.add_namespace(participants_api) api.add_namespace(contributors_api) diff --git a/apis/file.py 
b/apis/file.py new file mode 100644 index 00000000..27434bcd --- /dev/null +++ b/apis/file.py @@ -0,0 +1,110 @@ +"""APIs for study files""" +import importlib +import os +import uuid +from datetime import datetime, timezone +from urllib.parse import quote + +import requests +from flask_restx import Namespace, Resource, reqparse + +api = Namespace("File", description="File operations", path="/") + + +@api.route("/study//files") +class Files(Resource): + """Files for a study""" + + parser = reqparse.RequestParser() + parser.add_argument("path", type=str, required=False, location="args") + + @api.doc(description="Return a list of all files for a study") + @api.param("path", "The folder path on the file system") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def get(self, study_id): # pylint: disable=unused-argument + """Return a list of all files for a study""" + + # todo: anticipating that each study will have a folder in the storage account + # with the same name as the study id. 
+ + # Determine the appropriate configuration module + # based on the testing context + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + + config_module = importlib.import_module(config_module_name) + + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module + + storage_account_name = config.FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME + storage_account_sas_token = config.FAIRHUB_AZURE_READ_SAS_TOKEN + request_time = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT") + + container = "pooled-data-pilot" # todo: this should be the study id + + query_params = ( + f"recursive=false&resource=filesystem&{storage_account_sas_token}" + ) + + request_args = self.parser.parse_args() + + # subdirectory traversal + if prefix_path := request_args["path"]: + print(prefix_path) + query_path = quote(prefix_path.encode("utf-8")) + query_params = f"directory={query_path}&{query_params}" + + url = f"https://{storage_account_name}.dfs.core.windows.net/{container}?{query_params}" # noqa: E501 # pylint: disable=line-too-long + + print(url) + + api_version = "2023-08-03" + headers = { + "x-ms-date": request_time, + "x-ms-version": api_version, + } + + try: + response = requests.get( + url, + headers=headers, + timeout=30, + ) + + response_json = response.json() + + paths = [] + + for file in response_json["paths"]: + data = { + "id": str(uuid.uuid4()), + "content_length": file["contentLength"], + # "created_at": file["creationTime"], + "name": file["name"], + "is_directory": bool("isDirectory" in file and file["isDirectory"]), + "last_modified": file["lastModified"], + } + + # convert lastModified to unix timestamp + if "lastModified" in file: + date_string = file["lastModified"] + date_object = datetime.strptime( + date_string, "%a, 
%d %b %Y %H:%M:%S %Z" + ) + + data["updated_on"] = int(date_object.timestamp()) + + paths.append(data) + + return paths + except requests.exceptions.RequestException as e: + print(f"An error occurred: {e}") + return "Something went wrong with the request", 500 diff --git a/apis/study.py b/apis/study.py index f05e697e..34186e0f 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,3 +1,4 @@ +"""APIs for study operations""" "" from typing import Any, Union from flask import g, request @@ -22,6 +23,8 @@ @api.route("/study") class Studies(Resource): + """All studies""" + parser_study = reqparse.RequestParser(bundle_errors=True) parser_study.add_argument( "title", type=str, required=True, location="json", help="The title of the Study" @@ -39,17 +42,15 @@ class Studies(Resource): @api.response(400, "Validation Error") # @api.marshal_with(study_model) def get(self): - """this code ensure each user access and see only allowed studies""" - # studies = Study.query.filter( - # Study.study_contributors.any(User.id == g.user.id) - # ).all() - # studies = Study.query.filter(User.id == g.user.id).all() + """Return a list of all studies""" study_contributors = model.StudyContributor.query.filter( model.StudyContributor.user_id == g.user.id ).all() # Filter contributors where user_id matches the user's id + study_ids = [contributor.study_id for contributor in study_contributors] studies = model.Study.query.filter(model.Study.id.in_(study_ids)).all() + return [s.to_dict() for s in studies] @api.expect(study_model) @@ -57,6 +58,7 @@ def get(self): @api.response(400, "Validation Error") def post(self): """Create a new study""" + # Schema validation schema = { "type": "object", @@ -77,22 +79,30 @@ def post(self): add_study = model.Study.from_data(data) model.db.session.add(add_study) + study_id = add_study.id study_ = model.Study.query.get(study_id) + study_contributor = model.StudyContributor.from_data(study_, g.user, "owner") model.db.session.add(study_contributor) + 
model.db.session.commit() + return study_.to_dict() @api.route("/study/") class StudyResource(Resource): + """Return a study's details""" + @api.doc(description="Get a study's details") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(study) def get(self, study_id: int): + """Return a study's details""" study1 = model.Study.query.get(study_id) + return study1.to_dict() @api.expect(study_model) @@ -118,19 +128,25 @@ def put(self, study_id: int): return e.message, 400 update_study = model.Study.query.get(study_id) + if not is_granted("update_study", update_study): return "Access denied, you can not modify", 403 + update_study.update(request.json) model.db.session.commit() + return update_study.to_dict() @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc(description="Delete a study") def delete(self, study_id: int): + """Delete a study""" study = model.Study.query.get(study_id) + if not is_granted("delete_study", study): return "Access denied, you can not delete study", 403 + # for d in study.dataset: # for version in d.dataset_versions: # version.participants.clear() @@ -140,6 +156,7 @@ def delete(self, study_id: int): # model.db.session.delete(d) # for p in study.participants: # model.db.session.delete(p) + model.db.session.delete(study) model.db.session.commit() diff --git a/config.py b/config.py index 1c8e35cb..9eac5d19 100644 --- a/config.py +++ b/config.py @@ -7,3 +7,6 @@ FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") + +FAIRHUB_AZURE_READ_SAS_TOKEN = environ.get("FAIRHUB_AZURE_READ_SAS_TOKEN") +FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = environ.get("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") diff --git a/dev/image.png b/dev/image.png new file mode 100644 index 0000000000000000000000000000000000000000..90ae014ab5c8ece706ac04506a763e2e93c749f3 GIT binary patch literal 52415 zcmd43XIN9)yD#dpu@r%&2nYyRQ9wkgQUWR>N>ypnQ6!Ymq>}(q5fBs*kuD%0H6)=W 
zln_8^ixOH$0)zySP6#cK0HK`V+W&p`zNhT_;oRq(4?LL(bB>HT<{0CB-(N}eBO_g& z<08io95}%9@WH(&2M!#<9yoBY{m-MEPww(xZsffD;rm4Q?g2#4**VUe!_IdM?;JQ# z9?!k~>vg^nT1sR^? zAeXm>6Bq7$xW~`E+$PF(|8I1kCm!x`hx*(g{MDIgW+)Zyb;JG5+r2-3y;&i91d1NB zd3~0@8vbZm>@CDJivH(|`f3`1o)8r)mU8ckEr6L!6H_(3y^ zp>fD}Zw#`S*}pyg1ybNJ_!p<22Mz?!UK!o{eKaX{S8o5{oYzV4ANvQv@B6s-U;nm# zYq$S;n|fjG;Qqm?V2;rK>&NOpIm2?`z^i(l+5NjdFe&?gq>1DAc~~Kxzv!qKUN1ru zy5jSUg}sdjNa@)j9dg%6hDnl%L?@dZNE8;I7PMZXzY@dn~RL3XK-LGHplG3R!F=S{y$_=Ezc5RpN zG|Ln?^FgHS?9~WqBq+~WPF~^<;zx8 zMJ=%jJy~LF;;+wbj0ErB-A{0$?<{@EjSv}QZB|yQC(5f-yz~5edH36Tby3B=u5ghv z8X$3O$-;;EXwAw}OJhTQ-3wRk&LroGe3G!9m|y%Tt#$J8>*_co%1(uK`$^Z(Ozw8c zTe|Af0t{lPIfn2pGPU~6O?UhP*IFsno1tu>JGx;7$Cpp;`d?$QdihP7GhlvgJ&h|C zrk2za{!q^+W$}R(_8f2GH)#OK+Kknb&HdPzd*MMG3~ zV!0W?m!jrAyI4Lx^!D%YD6>kZSWB4P6Rd9RW0jARK8%pL(co5-3UDJY{j&&(E7McQ z)XoesUj1f-S|lQI)%TA<1M4Yc#)mJah1K!94sV3_23+z)mbl-5(#*-PD5Tt@{9P)t zjQe^KGG@YJ#J+cw>f94-<(pbR3nD$qX-T1P^Lr0*p4b7>`!9FBwh1Yx$)@?%wu{%S z$BN#>c{ZZ`56Z-N`t*Luh!H^jA$R(Q2Yzn6B+1IQ{N~NkI`7w>e^&T(joibJ4SYhm ziLE#1nj(Hx5u8!(QFSo80X?#T+TaJPd2?-@vPGSk#RYTsTfPc&e~Sg`i(SDwvEv6} zS5MdQQ^lWjFF-C0vC5uebE7AoA|jtwM^cq1a1E^`YLo^*p}uP5n71*GKdrldzpo$V zeE!6~^v|9py4(sfAB2$)*JwuV~c3c~bjZc3MRJ6`d&4!tvon$UzUQ&Fxe{aK0 z>&`#=$YcIbWejQzybz#fm#TcJe6C+NkCTe#k|a`JX-~i z{xQIpbW;G7rQ$y1uVM1dHt;e;`#J?WlJNqh8-LwESr|bgma^8i8gDKn+slaoeySK= zClg_9zMw?f^hJr?n=2JopsH8&Ut%|YFg!m9S6@Y`I9qptmR4*VE})ua_UA&4_~{QK zi>kn!AdSTxUgVBtuC<`7-eEg+DsyO*%#WYus{VHP+3z#${sJ>z@zkJ|Ns%_>XGvHR zY1b>fY$x+~u&S~9R}_<}9-I3eog4)JzsIkswL~50)(B*4M=MxYYYPMQbCjIfSz;gC zAvR(jH-?b8D5=9UV5vhr>ywXxobEJrCz%*7Nrd>%OU%k*N_mb{ZVJ6Fgq;AYan7)n zBkgAnH$^|8{3o>*NC040m};`rF{F+3Nj58`2(k0T zh|wds!xkzYJPTc}M*tQ9TouvPq&P++`TfU80Whq%7YTebpB4^|eVlzsgB17MunSq}lZM zVZO~CS{#1y8CKr)7ITMvs~|TLjI#fdSsv)t?xjQ6==>^kh;eJ#RcG{bnwwku2q(zM z3|b1~(|$hXf|v~p8xFe6R6a}@t`JjG_?d$5#~q1ToO?Ni*4|{A{mJ=`8@7n2vpd`A z_K$G3TF8Sc8cGb)v$3vM^(C~>bHEC;jEMq{+YQO%b`s>jEgd-n)sk(`cB_}f+2NPI 
zUSnoAl%OU|0L0xL7APptzz{a`HaXVCpHW%j>ZY88)x-8NP=7304Q>gUCd)F^5z-uOeZuJVCyxX2HLp1=7wWyZqraB08p z(d{|in6+hFHbSqg{524vx;xe|I?-g(OQ|Mkc;Df``v zKf|1?P_@>X$Z79hpGe1Ey4_3|9aCu%*tp;}82^AqYm01^dA5b*Gf7sPmS}fCikKm{ zyoIYu&1O3PXf&?@Ra3VOGDqY?{&>-N&q~P3*s0wI&>v`qaNyj<&zLl3pD1s=2_7DK z+=6WS7`e%ZAbz=7d=-J_ge2cfgQb<;vGd9j#OOyjY778_0wb)0hJgS<53`wK8TR=w z@IgZZp1{aOUmk*8+*C?yq-=+eG_2@x?=pF}N#I5AdeU(gBc_v5xsvJ#E%EOzZ;zDQ zI5hFDT7@=eXl<5RqghgwguR9Xi& zo##+BJ8D%XZZ_Fx43bk0I;T2RjK!>l0q(j3^DDxdu}Uz3iH;UVTQqh#P*M6FG$ zN^M~rj~-JS3_K|y=WQ&94{Fx}yq2VAQ9blWHx{f&QFm9jnneJCMk}rlWcV{_GAD!J z+|885;g*4T+nJsjb4PN8^6<5~7jP@xC?!Q6fUvVnrOfcbbEAV=b-vpbUAqx?XSuIb zXW2B|=8We7V1-l!bysCC!W+t2jfu&FnQuFs~xreCx}jUpScDZI`_e)A=2dF z3MuBE?xXJ$ugrz&qEpUWr`dKiiHcFvZ^aE|t$;ehNgYF7j*hDVuYZ<%bW~9S_je2% zZjTRcBN_mBZY}6x7oPJMOjpPwV9GEh_c2DSF7l^;oFV= zV5$-i2ne(fESNgAfX&=IiO(NA(FcU+?IT3TLHFLfAnh6`N zQeAs0;8F;EzO_`{<~;2m>{tKy__!|gcbfiN*({dqon)) zvljxK1Y`4aD;XppMlE^m;Uv|IY^WJ=3)G7j@0-7?a-1~!UQPp986FYE8`&*1uf`R9jksbOLz*+`xYJ{aS4u@FBfc>e2R?NY-{rMiR%EuzjK z2urg;7vDGZ`+3NB0@j%RvvY=K;G*YwS&LiAK5z2U0d>H=x_s8nvz|b zSB1;f2gS=2u_~MW>7LZj{@;j-kgg*%&&Ho^z@KnOyIm^$xxu2LOP)KYXDKXotG#eEvn#e@A~k$ zXuDh_QMhc%;xRIn7nE7Ou%}3qen@FhpAN%lVvwO|;E;!$DKV?m$vuUqtx$X^2n+#N z6@9z+UNEWV=D=`eIQ8ORhsTgCX5G#G`@Dhm;l=LQ!bcvxgdI;db8XI%L^&US?wH!- z=Nvu_4SsYe+!>c=k(Buu@GLxSzrKu0YYu$&G{)^gn0qV?qg}Fn@7@urnwmeW%TrAE z+q(btvb3K<^di%4YnCP5JW=F1^?1dtQX_?w%s{rhRuh6#TQ+cN%jPmmui$oyn(9wv zpac0-rNr*WY|jNTf3oOoiK%nzrF5ZcKfU-7r@*3a?)ol*C4q*98YCS#0#pZvP<93s z4n=#^hmab|$xqSE24s&;_q=K?9L(!+uvu`Y(5V#*`+?a-L!h;JwFs{shSDHm`xNQ& zOFydHDa3Vfal&fuU1N8X!y-#>Z{>Y2EwRg@=DhtS`F6C}FAL?FzP%c5J{b~D8M3(TY5m>o9v26V^mTFpB zG~7n^jOle>DmEvaEq_grvdOE%eb=0EU=UxpoK>AWvP>a-r>3}~LaC%&TTaF_n}m8? 
z^CXZHU_Wc~fF|QG0)RlfmvZZEgwW@M>|ae5h&J6sCo?qlFp!n6jg}81ciYxKvP&}{ z@~GUa2X;S0E6A|_)X)CCZVF6{a!AB8ZOUeI<|)ygM3s`SdE}OoX!zB-4uG%w*Q)Q= zPRsufOW7AD6Z@4c&s~Sw7`(ZbD#fknPa@_)eVn(0lrZx5?OSDjg@| z(|*I!i%$v|chhl_R%{WRTfE|<)`WX;Xv&p(g4Gr{R_}K5`X|H6?NA-a~CooDPygCBw^Kk^)BVMI1 zfM!=$+0)Dmfu?$5R|tE_romCdjt9%NI8P)-zVTss`E%p=o*H0nG0q*|{PPG;EV*7a zb4>A8tsP@%8jW#}>KU5Gaf;-C7w!fd^*2W0xfz8|t>+^4PAH4%tJ^iIGF#jY!3lO- zm4W_q1fy8Tm+WIO4fOA`skyF z?tU0?n3v?mB5FMXy`eBj3$xS|MA`_f9C;)Ps4_RM7aaVxY510o{Vr}zKJ#ydulu;3 zStqZMYZ7-by2JBMXlNwl-A?*1V)%FuGJSA9%ed44#>qPC9ywbHUrtxcpu^?K))IA5 zfs-}q*2NjWG$23^)k zxdI9=g&P|ZZ*LjKzOSMo9K@!}_o`H+o`g0nee8%ul8DC^=M0@P7L~KCGi<#2)XVAq zsxEdm{lyqNDnh=pD`6hU&y2!SKkc|`moU`wS9MCN!LJF(lS_}LQa}c?-R9wz1@!QC z(<;1|ntR4+zR-=M&~2I69C?NKkr{c?pBPBY4mBe@E88&bx(fl%ZE-n0kKIXlIyg?O zwc+88FIKI|zNIf5KjM9M-gYElE9oilAbMtcBfn-J?^J2~VEuU{qY^%aVr^WvPZ$~h z9rq1V-aPg!=vLoh>-*(i*q(Z`Gn^wS5I99;ull`fpP()+k7Qr2FiZ_^0=1LHGa!)X zBilcdKcCyrKM&xe`!c*1mgt(-07Oo4e@Y2Ul3cS={y8#YBkD7;qE9sz9NzQz3*CtS z4I*2d%2DxZA(&U+<)J(VF!d^FZ>7EI|G>pI`u_r$JLdxb!JboN|H}+OrjVME?d3&E zG+rb@masEz20dI_8+Sq%eaU|I{wPu3oeeyRieK96P~Ykw+sPAEP+fwK8P8VlQewBJ z$eOcl=PW)0_E+eayGt=s*@F^c#SbOeL+zuf9l<;6q;*ki+G*OH*_}HZEJwyzWAG%; z&T_j6x9uOI4Qk>$msQ@{m;tuiJ|FUWRk6QG0js~HtB^&g@nLIuo-V5k6(;b@S@i2F zewO(1INC|4-w9kDrci}yS2!>-5rcVVpk>|mUTWgR77AHq?jcmEh~Oj>b*B_UTX^=< z${KeCxizrFwotKR(1I5H7}?(V?zcM2LtMnGzakk4LTcJhPf40QD|mG|h@pE%s_cY9 zuw7dk2jdm33%Fv~_5jyJLO~+8D`NE2&kpX%y?9Kh$oDANUOJ_NuX;rA50kdNb%F8`Fwb_GST1pxn^j1P>uC=JNCr&&W_fjWN@N%5W|LODXZ+Gc{f`V+$SFGQSr;r^X}7a~Y(`Bez7xpFDHhFVU@t9{$K{nt z#2>a+o8QWC{# zx3g9NYEN4=9+k0a9{DE;ICI!S^kZJLh~D5=4^!LRWZdCc!<#(ViKA?_wY36iN_z59 zHikFj)yL8!v8KqIy@r25O3XyjrAC9v$!3E^&Kn-kpU2a|KY8w%h2%lhB2^LL#qFS_ z^Kf#=( z^aWQ38ins1%f`#dTxEA~QEw;2RF1w^M_#qrzv=v%hR6NdR}4NF3V3-vp-sa(*pL)6 zSMA=9^{_rnRyIMq`p6XeJ`DLIvIUNv}G6Vg)(u-IqXqIM2_#2bVCPEfvat{yP8$qZrT0bOp_3|hl;Ij7|m}# zvLV@`dKEY~Ik{!4oUhsaLldai-ABm=WCI?M<8VRO9me!*PhzKQJGW43(3wwRMwrTM zd{+{Dq1j&9z;=T>)l?`FW9H|3$(N^IDt>cu?u_hWm&8)JIXtgfP3t*gwwgK^lY33* 
zI_GJudCQE3QP8Idrk)z>pw3$9YT7TLxNwEkRI#dVaks2fupyTc}0WrnyuF(WwM8| zlG4}oN)2Wdi@tvWrgiSFf=IHBPOm+FXqYz|RlV5r(YZLYJ{|KM=Nta4A24oIzA*8M z*uCC0?N+71>EVIZ@7>3-p%4@GYa*(}DbE*cg`O{w_>og0hVFm45~7hO9-7Gr2q)$l zJSdg6plx~2n6=}4Ma8+RXbI@gab*QvbK??4>r;&b5*kk@Bar8oM}VjlOzi{zOpE*P z@oLohF|YWS3o9dWpE#*U9G0ZYrF>2I{hOlcF=GwaM9spKD_7ZLDo$FYq@5`)yKIkw z(#$S=Snl5e*HPxp5a!#k$g2gB$TbXS!7?9&-^T?%^lI_QcnzBA`Or8lT07yX!v{A^ z&QOi_*fu!Hxu1{yocrlghHW_XeZgx;tND|Ub+L~cfN zz^~xLQSe|;m~H3MT2|5W(uAQFbs*+EfhXOy_Jv*Go>w2lcNpy`Eq}gutjCB)_FbLu zm5X>)9wq0-gYLt72r2g7&(1KlgUEJvtZk#gsrKgwk#UE`pht4uzCeE@k$se|-6|=V zSM{`dXFHZ~LCN$mO+Nl-*9iqUhhymQQ9$wsD|MWNQ!5gv8T*mpk0ImVBo7YKw5pyd1U7EXYh0nQ#{tDV}-u2$>5kz?qB$H$?%;uSD>AlVQ^gt@F zY6O)pY&jgviQkmI$zdnD8bX&7^{1f78)CE?aa?jmHR~fAs}L2YEzG%~sp{{!Eb0uI zygr%8UZ+BqYHubw9$K2wtTtqsSzvCG;|I6g*e*?7-vbxJ5%gyuI>~@XDtzT|1xEcAK z=$B*n<3OO|;llTMp7#*@naw@6UB-fsMcY28f{lkh<_ld6JjJKJA5S^R>ca>s=P3%N zED+|>5}JWgKa|`=Zha;Aa*w{V;jE3Oujn!5;i&=(4RO{F1Zsj|-^Xvfo5dPh+03Ng z5)GsO-YjnL8(vsr(jM)EcnpO+%`AL?CB08BP*|lg@ku1o|`d6$66tZ zYMn~^lf_}WyS9?yR*yxfPHVWZns-+1fprv#$J(>My;Xy zE?U@?%Rox6Yj!4()n5whS1fFtC$kanqK9-DJGbv38T0@(eAdsBN&BR zoGfW+P|UIh{V0{F^f+T%=osx9z2gTC;Dz}#8K{@tKk3!|%-=0u1?nC@_^ABBI&g!> ztL8SJ)V|dk>W_n!alb(k-Zx)) zC;;WN8D&AsWp3!h=Lbu%;CPWd&vSXPYFo9J7i))8WJLvNw_J&XNNx4}w)Hz_KNN+S z)kcoRTyfL4SQ!3R@%dWaV}HVK^2I>Y^rl5VvgL)qQFzgCdo{NByo!K~2=|-NbGg4F z^oEH~J@>>7#bL=F=6TT+O0*GX+mR7a?w!9F*QU4@CssB!3eUNzu23%O`7Q{T+T6p- z3yv>a3*LY14h4Tpt+m;QJ}fsht0P1-?4$Lwv%Ca(V0G@SB&)mp`_gD|W$CeGv#_!% z!@?+-xto7$TZ=dC$C0}8f2PTTY%8C!pTda!&v=5LA#GY+VcDlKa*M@-bOP}xE-Fvm zSXx=b$ak*zs!blyT04W-UE^+zlvYoMw5JYSQ`Jd`;ij0x^K=dWC{05*0hD?zByLH< zS@EE3R{mX2YN$$z!b0W0ppP`kC=N5piuR0+40qPS);X@Leb#<8FKndmv=6czgu_Lv zl)la1D&DRB)~ViohX3x)_%nj(=S@^{TMPi8#;-_Q7w4)P;eIujv-Bpi&4cPG*aXVLAqT1tL-0v!}dZjE^MFLj~+{ph0&$hMDf} zrCI84!C*yHD*4-eEH193G8r6mcvip-N9< zsezDOuma+tdkr>JSbK#Pv?z)GKX^r0>U-Hz`QB@yFY;4k3^jZg)Ux)ujW_w!#sMIUMD3Aqek$JDb$XRl$;oEGxP}cAcYa55x!H5kaq-l`_w;hLr;ViA&vety 
zfENPpz6r#+qV$rP55>=}lLM5ep6c@n$|TR4f;1#Yxw=wUmrC`-8TZ_KasO*>KlRx0 z`^Xg0oAlnTQPpoF3X(1Z$5otHf=1b5cI?V~=wrTr1<`}Zj=o;T1{o5gpH98~y-ydM zTYG-@e+n!p2>eTohRFVh8>v~GTvPuoOgqqh|6j<$|CIVqGFig?mrOqli?ub@thI;k zP$8b(liQZk0Q6+Deb?8q1$#S< zd(|B9VQ)Ko&sWwuAVcul_B+vVn!pmMsr#eJ?2bx&eU8L#kQ?;&Atv7(6f&7&dZP<^ zk(HQTuZ_cTt@X!;4aTN@A=yK1{ryo}TO@mb$JORl4q}ngF!(J#bit9+r6Hf;I+PIr zfSnep)eL(OTxVw?7Qg#RG(8PLV#Uva4cC}ql-Ld^x;s5RJs65TVaC^ZbE--7@03PT z%7m7LdWrGq%P7qaUlZ=VrJZ%iR;7hK0SULAY5NIoow?`${JS`qF}3>#ij%}CyxuAB zERp#88fL+?k)F|UHKhY%XJ?RbiY_Bm@|n#DmXZUo z=K9iHY&EHbBV~wOwcgu3m9)4Ns9pAp?r-JZ`QnEp@22Nu&Agr)LHSoCQhy(HAZ=%) zVa?}9#2g~$$QgL&@s~)CZm`0&wKjq|=0X1D99_C^ zogct1begZ7_=FL4acYPjRiZP(K|lauX>?kb%>yU$QKd3?w?vuT*2n}f+Op?t`>z3* zcjlx?u+#C)e+N#yB?Bx?j#ko@T=u+W8+gam>*f6JBHz32`N-L8_5nknN(-P}J?O(! z4{~MP9_#A1W(YPUO#|CKwp?e0vaN3uYWWa? zIVNB~DzDUmaf6OspX9duE2e3Y@9R&+i=muG1n3 zv;hygu-tA>SM&mka$tW=OZE^chv-u7DQ~<3#I*a5!eqy2*p5v@rC&fRxE@I&`T2Ux z$m%i8)VE!2vI?R)B^d;4;cY(Xl(pvvSw7$HMerFUQxdPF#NS05Gsc6Yze8}BK=cKb z!aKo7NZrp(j^-7d=)~PDtS>-(?FwCb*SPiRK1|uwF^Quw=pY%N3;E)x`Lz&h5fYdj zLJ{;rk`PCF=eu7UgN%xPcB1JYJ!Zyjd#ADAy&m#^VT93TWM4WZY(*plTECUK$o4?9 z!W=LS)^x5|j@9v6Zm#S$Sb4Rnj)V2`zjENQw zL$VN4gX!DlC^w-&y9|BRLOZhM3}aXP2xFLa6*KCi)bfqE4zwvUmkj}YYnT~w>b{A> z(0a%-RTF>OT2QN6P|wk>V-po&h%fm|9$m_`^4YRTicr%yi#e%kD5GL>KV@NT?=!6r z{iV}KOk|wZ;oYUa2VNx2wFKbpSeBi)>c2n{;Zxh8Pf5=;v*1UHz&>??0%tw7^VGo4cUW zYhkQg{fqjtg+MuPh8Qc=^DiM|UDpD|%&Xk;FMQ{s2=afBC~vRR*1szec9FnU@AKHN z`gy@wv|Od5;jro3&S_2}?8#Iny&HX5og1aLts>$Jzo#yZbu?@=fAXTXpd#5~!>TT3 z%IVOO4I_Pv-W6u(1RQH2&80$Zru%KgRyu!vkP1I&a+FdK!l_&cXVNi0v5xcYu69?h z+&9~demBiQq~2_4Z<)L3%(d5+fZ$e?DuX2fE*cKf`T+_Ua$GcOFa|n~iHf7zJveH% zpJnD?V3QURp@-!xO420S{>oaZOrD)miQPIARp}m;=2e}G2tWp}pRL4pBco3stA>l6 z;6|oHK4nf-`x(OZ{u6|-tB|J8cu_mme0%-rFDIg?P~~d3oy6(zB&lWvXyf5C0;$Kw7B(RY7k4N90i0}q+HR8tcTL$)Uu@jXy{t|FsjPtw zZn3VyP?b%m+Mt#@pa3dy&F=61g!jbo!9)uk=w_`e@oIb8vt% zXzip$k0pq|-HzPBO1rTadTB+t2NExh4}jD^E^fOHb%~tUGp4CFUe@=T;fltrXFXiQ?lbrf1)NOyk=38$C5HwRL|q;A!33H 
z@_)#Y{ayBehoKfb!6sHN>8$>qq%h8+O631sZr#&}4xhSU724ELm>%!U$(?+5g)qgE zx2hdAB@lR`0c!=gc~*EgBz4ZZoRdHA{ z&X#yp&A~cfd^U48bss`j$C@rZpk~x?1lHY9>JvH+491(AaX~kZlU!I?K6{%6&hdGL zc&}VlpMmXXFw@{?_gc9zgw2vrsa7RFgiTW5y?V!WcWMbibvGftTBVrKoqbi*3!<$@ zg!Rfta4U{Mo+Fa}Q6&dM)*_He-&T;te(3rqhK?WmTMzI6Xi)kyl(hdCpTw8<6 z;iZIvFCyJsQ?cJYPTE=Wfc}Q@+>lTX4?E!gKCxtBDaDa$PXRp-s4D3AKJB>3E!S+r zhst=4SbtTUJg>Vxbf`m46@cQVofIR&*hcKMS z(PTDRS4-Ns-2Xz2Lg)(QM2Sa+!9)+uQhug(HH+m@9gy+ySFe2mLAmUMl=B|A3e{0p z{Ivh+77DnldHsp9gnlB7zZkLV|B+(@bA?nFV<0D25y>B^uyJA7kvAxtf+IYg`n7bGGNJq?p zCBfQ0NXXX9QL(d8tm@pGo8m5GEccHu-}LHAWy?5^&s;QEMOy`wQVZ{89m|j4g4k_yV+Uq4f3+QHd&ITbJ0!1UV8a)QmG=x$vl4&^f z*|e-Cnu^Oqw%0SS}C%7`o@K8-iAD70oUcmLn7M zI{6?qr%le@t9TKXlOEn1H!q`v7( zz}fLOb!NKg%w_j6^7hn%qhP4!aqJbyj)o!|HEC(Dk3Tnh;#M8w1yk@z1b4Uai4N2U zd`iaOhi8;`7~`|Gz6dj?5*saD@W7s+G;~j}$DE_$+wt0!W1RQ5>y*H|?h%Ksl9{E- zXWaq}AD0%1*uOO7dei;~TlND_y`i*`VytR`h;KO;39_YfODNJ=mjUbwJLIHQMdo8D z2`pwPvU#59M{Uu$B5-DsBF32>;-6-hn;_i-(+>Cd5{|}>G)?g22fUjG)PpYDqCwJ? z0uV6bL9pRkdYuRqh?P{IPOV=SZ9uw;*bcMe!ZMc^8I}p#&5nDiD8&fyeM9y|#b1HE zLaCWc0=p`-;&Dx`@gAemW@UWE>*|o2>o>0}-|0#hgFQ zHT;wbzFMbR6l-9u86rE}1Sz{UyhcM#6Yp zsifMHV`J*EPJH5e^NU#0X^s4lH&*#h>f6sqpZmN7T@QU8yHr`BP;#YWyW)>F$M9c1 zJRZk7N4Dyx(`{5+iN&DxZm(fIhZ<&1za=a@AToE*@$B%^RPDp+0Lonq&k zV>V2JrjCUu*N#Px36B;ZxtC!Uj^7|m*4KBCPP2?IEpI|W50D14{dYa02=NJwdq0DIS4l9%K?xjq= zVm-TP8jk0bf*0Q6st6$Cdl0eAu!a{-sN=X3k~aP_EilXA17&XB&i?a< zi9^QwA-3Nqd+BaFv~MI=51^op?Ou7Eye63fNo%VukT07u;{XwSidX~{d4rPEdXm(!T;ioK8*w2AG z4U_A$JF)>#QhmXzUBH_#sdX3*W8atT3llG6X+tI|y$v%>8Y?;z3wrRa#LumtaIdgH zb&cr(dAhYQ#b$~ht;KLaz=NRhqnAD*?OlI6=#YSn134Nj2b|&T!T8gOA zP3A4T<8jRkTC6KsUgPUdoh(j%S`aVM_*1jq?mOQMbw`9pT19S9v?OCaDT2o;&E?Zf`u8B# z9PgD3OaF(W-TpJ!ope`23$j-C3V9-#>gp3`180+@cSe$pb+m5)f-N8fiVf! 
zs;4jJRFucR1ea$#=8ecmxb}pxol5;P)WjJi8-M+T3aoSVMQ6azgq8FP`Og-es239W zr*;-=N=fE+bGY3fhKYJ4ecf;`g>aI6P1KJrKc_f6pEFbcYgN4M?vC2}E>~5B2i8p^ z#ofq1_ky1UFp?D`i)khtRIp4#^Q{%FY}b8egydc@xgwFAHbzU8@{`4gt9Wo{(wZmRVvGuekT2`?3+??v_Wk_hYs>F2Vom^;ARFtXWbQYDP7H-Vw&t&*t>VKM7XAz;Tpy-}FE%|q~`pRkh+ni_ye zhO?$udOg~Dql7ibhFGPg*%v7@%`MJ(AE4X{VN)MKMGk}By_M2?KdO^FuCv);%y_r! zb&VT01-e%oHJKdUgl>wX8u2FnnliMEL!LSGEDahj`;|H>cQEVHyFr~XfZdxccVIeV z;)}3*%h8y0flK$?^nzBfv;CImNCZ zbp(%6#^_%_HFi3ZB;H7xac$X$cWM@>8CMv(r<+<3DWua&Voii*8qXQs0g87BfsZAoWoHh!3K7AJY7OKN-$GW*uy-}4XYJqEwkpAXHZE22%w;|?~28(*rYEU z6H!ILIsQ^i5WKf^?H6}1XlcTJP&=EQ+0q=huLSW9l|G|1%DBcM1g&!-7bo5ODF8Ps zfbNm$TAr}}ak$8k`LyxbC>|_M=cbYwyXzqSvUogfsyN)r^hqvc5!)NP-VVi9>tX>n z=^NWdIu}l=?7O#`*3Y%4b3Bsv!h=meEgAp#lX_oS0nRxbS^}^rDW`&DMSbWsQ!=?D z>a-#_n-c;X5mZxNf^diUW105IeQAdDpPQgjoH+-=jlvC@gUS z56t|oS?JZvbA-J;VkdiD3?p7CN!RDJK&n;&RW6s44de=!>+a&~fo%{1aXLFZ6d7CR zFJ|1cQE?!!L4Pw^FZEm7e@;xwoscSpUuL zVnD|C&Es+^S=20XWMtrfVg*4n`$OctlNvHB#!*Y$7e&2vjXc^Z`8P*EG*@JC(Ouj& z3A~92l^RDmJ6GP95MC~nkb0u6BY#iwG{V%0qZj^iT<`;o-BR@1r_19W7*JBt<-uVE zuZP%s=ohXPWumq9jp%0oNTsJK=u6s_Zj}u&U;(k0j1$a~{UmJble!pnCXbW3*d~`r zr@3CSnB&1<5fteMgYU?azgYUw9;S0GTg|%{EMLf7sYHyRI{ee*7pw56F#F0WxB_s5 zN#4$jrJo&;_QG(4Srx>?opGPXuNd4iaz3X3q^OY zh(8MuClCHs0IXkZ?-S(|2?%2E;g)}lQqS7omhT&-UPPS=WCaOJlE*5<&#^15nJYB${XLjp*QXUlqq} zOBWs>d)`P4f+Sk0NZ^@Ho^OYl9KrUQAF2A3Qr39rb=c6=O!e#&!&1ugvokhj%HMYu zzuvF^Z9YXwmB-3^Xgwz?#Fb9HAQBwzdOWwK3LS115jn;T$eSNnOp99_Ug&wqk#UKu zYix^I^qfz~m~p;kWZg9e&v|)#P)naPv9oL9&qjOq1)Da|0&n9bOSqb)G*@`)kHq~F?|a(fpmaj)>jFt5J_cGBz0<3!Jc3$Gmqz)-Rj zs`@;~({%mKR}G3o%#S1Tk4ZD=+5zR*tb53zr3MfJ8NKD``)fsC^>@!5m>E9+STwrw zKA<`632|uI|5=T-jtUvrk;b70J>4HMhtdb$7`5Kt;<)i*%_GuHkVE*yKY#8{+^z^v z$vpPN+1+>Iq9lyqw5ysmnbZyUe@6^b= zK)_M!9L+LgB-8y;Trt&#qG1Yq2K}j^i&u?RZi!m@F5BJz*+3HK`n0?vX$301?F*3Lm|9Gi70IyTRcNOjYlr|cYFQ7f*1B~jt zJNNRpQ!6VJYlP%2?XWsDdt_8Q%B)KM^lhwU7uxYkH4po5^NT|%3lD3na!mrU^#CV( z4PYqF7NfL~zA!TVLpxinkLiRL4CYLEX#UNrSYCMgau(HF6~~v9Xlc>8o386IEwa#+ 
zFhE-e*V3cFp~_Q(mIPSTKvzdrwnYF9*L#@sUaD8tS(|0xoD!VDU-@5ojyK^BpGD6i zkFoK9^^wH#sORiT04F*vy^omtK;6719uH}320@c&IqLXRuSs#$0A&`lrE2rLtkj(| zoI$z0dvVQW9p_K zFeMxRVJK6@|HV|MKUu|MEe|%j| zN9AwkZoi3F#hq*jc!ERI)i>G?msGM@a!+g_qdbPy9IAu@+l`(d;P?y?m!dY?ZqVc` zzj1crW$B73b2qeY-JTz@A~TG%xG&o}&~5Y&O-K7-x?B`$l!CA;NgsaWy>K*N-a|qI zM(-N^NqEl8<2V-M(mnXxt#e@{haC~I-#!XU&{5)A^1eS=O0;NUqNz|Riu}qs%cXp} z_P2NKh6yfx8h1xw3eVQB8iE}`aO6lCE-hq`wo(hkUkx5mqg6h-fphezq&PIxq4hfo zF#AR-#oFB$9-xxX^!j8pznvkyh4x@9Iq+I2#LVg+bCd9rN(UMjYSMMSe&@5#pKj4~ z@aWph4FAFfZ5OJ87Kgb?UzuxnskSGHORIQEB(|r!2zrpxuG*7NjGooaX!HuGZ?w7L z<^2I4RG-%Qd%{rDY*}mQ-)7!SVGY=jc=7o7#Hyo#4W&6Lrgmg}KZx!m=0FU!yLkNQ z#O1BQpQp>d2WQ63@pu9L?PoSHg8b;kg+YoOU!BOTm^4G^;j2QE=TAcZnbc6pl-xruXzCNG;M9~JZ+6`CbNpRfvq^gNfqd)o##Ja`^KuNejJUg zF3q?unu;meb_H_S8wE$z4R+stgpDJ8)@k?I>w4p$m5afyQp2p>ltL_ZV!>*40({3C zk=pPKSpDkpPL=YWdz)6j83T1)9E^L=9AmEbE6#Cw-`YBSw;-(Ev3KT_761OE3U)mj zN%iCi36;FZM{YYZZUQFnnoq{*={RcF{p539M=MR++grBYXLwz0eVc4DE<*UNQJ#BQ zX&)7!7fb6J{d6)3DNW)Slqz>PKgzK$ol!Z@4c%-PuTd(Ws_^J_3l}bHUSR_HchQo~ z$xh>}sAQ z^?UMQ>3or!>}B_gZud>k;UXfU)F1!Q6B6J!ROn z=o6=s!Z^mc$#W9j8xU}YMenh8(zhQ+qs@;%KB=^Mg!-pDW zQ_tK3*Ew`+H%DeDXJ*?gSpda=GcY&K+W{WU%~3a34ey)rr9rLY*w%s8!)4!}(CC&w<#x%U^(#npJ9( za9ovqYy$5y^5ye}#bS{H3=;Ppo#sI#h@zd~^n=Kk-TttETY>v22M|s^GpN?h> zp!q?&zlez9L29RE0#?S!a$gMt3H6RQM!0z`85OJ9qo7jZDd5_9tE%beqoWR3a<1U? 
zy}4*S=-TUFlU?-$MjufD$A1_<&s!qOWw7YF8eE^#EgQRmpQj?VZR9{)=ng+9(de%Z zrijzy&f~)q0G=e)>!j;n>E&5C2rce}H98EGXAvu?yZ1%kGRimFu;^y~r!-sU`9R0g zjE95ITZVjP9P`rllfP^}4nL7RpcnAqLtns$ATcpB{n>dXNQx5soU`fDGCq*1*;CK< z&H-%}rJy?k+P%kzMBK{W3twH$FBdViF`9`faEUX;WY`W&nhG&LJK!1d_0yW{*%^2W z)=ZifQN~ME?nJ~t-~l>K@QBpd)49TCdHWn^pn-W0F-ynL4ztSXNrn!gIedwQZLksa z?O?kJsnX+tmsO2F#&xy9)4lAO6CB7#n~g9@HhH`i^`hFKbXIS_<6~UJl*W5l{opth zoo;EjPzzmXP4!ikunBZDnc1>uf9Xgwj&T_^eBc$h`*=Z?)j}hHreH7B`##xzZ&mpz zqOra854uw+V3g?z{txO}_K)tXP4TSHulbJ>nc+{{bv>^7fM7*{eYN&|ici%4`7%}t zlNta2=1Kjpr|V*N2ZwGn4v}0c>K7olKmB&!;wnJz0Or-Daa~Y(2PLq5O!lB)({shJ z;$q8f4T8plZs%=T@Pase_4TZeulwWGjmzNsrnn15>?1EX`;yC1LBJ~N->4^!G!y5kka;&#NF!q)23K{56o4qJ;-cA*4i~T2{Phop>M1T5h9a_j$=_mgYa2fL z#*0dK-c{e`um06tuk5hCRNVq?zVE|drzB0EPPROB=mC_Ow|)!4Q^-_Kv2}|duDiS; z6`hi$WcuSvVIRPaIVGhEHpR^OJaqK2~8kB%Rfj~RH?7I)UhW(snR|0;5P_C z&270(vAh11R~qG5=T;{=j!cieXdLdBHd6;Ku1QmDnL9+Hc3oFLZ^vIxFjyZ}zwtnLIl)?HnXu4Ws%=kovEq4-Fv! z{jJw1p`+T_3jj_r4t3&u3w*rd`BGa54KWIN!cD)MCHx|nHrLxeKI<|`>#`Tf8|51N z)6SsiXnQ-**xEn)nVe$m-D6+VGvlu?GNE1OvNJKUJp3c7nD)(j$mJ1M_*qkgWUK^0 z5ChqqK&G3FJTTtkJWIbN&~?woy9x~Q!PUyS@u;55DZ4S&sVaxNOo$>16JqeY8E;+7 z1iZ6Al=@@51z%OxubF2(?B**o<38giF!TG^NcwU6*U+b!irbjA5@Az??qdTrY9|DC zOfftDqvLjWtnVdIlEiL=xg)JHJyU1BI!Ip{axX)NimbWoX4+ScltIpSyq2axilx*z zJ7`aKce<_hrhUV6i zgq?bKB+01Yj{7RX6_QUeW2Ipi8)fZr;ndusc*WO>c*P$TI*ZG_?jMrf`Mr({1oG>} zHWFr(087;w$zvDL++}S}^MIV2twtgACEBFEl&%&swut!T7_0B>e*-mDW*+Rhc?`AF zcN_dYsx!hrR>rIY-XYo6T?TxLedk;zdoeD^$joDbzbnK>>Wr;>A`vh*prjq?b!;IE z|9No;KIkts@}BWZA*CQ$I9?zV^ZoYDEu`tq0M*Gp&7G*}Adc0+~ zuWos1-IDf*1DR)SH7aLe9DfIMR&YHPBu&odP*KEOm%9w>_@k6YcRIA)Z|2P`j4n%} znCDK`954P6M=Zn^7U*g}Jo2e9DOiJ6P{ zu4>nYLf(hY3>9AHVN(Xy*2)iK!MYY7x1PsHl^Dqnmj!$bdLes)?B%tiXD*FfX%Mh! 
zk<_4djSzm2;6aJy#uGmm$D&`!sqyfbwBgJZCp{0N-n;(C7IlxPT%&bY7A~K;&cm?& z^*^bBHvmLLQm??jV84!BT+<;=L*{0wQ1vp} zXgh@pW!2ms-qT?p9{0To?^Sk6{5{b1fhGFzR9CC~+l{vK?Em-_f9LP&<`K)5?bHIw zqJw`+ivO+zKsn3vD!Zuj=^IV@TNBKX#h)kZw-`R5KxW8-Vku%$XY6TU=+9c`){?z9 zeMTgBxwXFX)hEu;I*EVjCxq)|aIpxdEHmh~yMm4&I|b zx_-Xi8*yXC;)GfOp;!;(ruA|}htmhPVq%S=XNgY5M?V%$vKf!de+5XUK9T zKBAujCls1IB{FZ)r0o*~#240?J;Jdgse#fZoFY;-`NkSf<>N5m95wwG#zk~omt_Xs zzcS#<^J{7`Z|%g#y)r}B;VM(FhTvGX^J8ItPN!6e27S`>xzyCcF+*Ce)t*&z%xGb$ z0KRmc%=brUZTbOZnFSZRD6niK>uNr2Nc5qy-5F8wOyy-5hJCCZQ;2@JZaDNs>5Rbj zLhzKQx?5&PwbFG-fM(^%6lC8mZ`;0P`5*3 zzM$^JL1y@abz?fek!ITdnzCq3xocLbCd3}2^x|Bs%qVR)^YoL|J52dbNhA2UAo%lQ zwx4e0SZjKJj#1SP`yE;_I!Z4}2sox5)_$lW8k{Hpdr)fyb0P4BOCA5SG5$buG+4nU z`R?_;44)9hBF?%sFG%y!?B$`jJKKNe($s`2MXN!Uem&O>9T*h z@mX`jEnh`N;w@o5aTM#YkC~RJPKN@4?yrI1MMnyGWqf8<#z|~#l$@MHb za>5&@AA$lWz*T5e|8zv&@RVet?XO!om^0i-cVV3R6!VLLI!8LGoGAwR^}5XqR*hT!9VR?}-W;fP-gEW;Nt25S?8=Te`9`HXnN zZ}lu>+u0;Cl**hRDX?N{zyU&L0YX;M$h57Pe0jcQHNA4<51pK&+q~!8S7%_e5%=gy z;Jr%nNYn`_>*RU~RAM|rCT(1vXfJVP%W!#!_HvatcsYLYYr8R8-O^+@E1m*7E^Xvs zckOpkIUg&KZhB<>)HO*HiQs@+WLU>i|Cr}$aklr+Wq9o5viO6n`?@NLZ9ke;C_qTd?3&KzKaM{Vfo1!Uw+00$F z?Vh#nZtIsSg0&45%Gh_UQ!*zY@1&+wnzocw0Y%uK+48J1lU z0pwZpk9^I1Xo(HeTg=2uM1$=#6V0!{1k=t$cgr*J4hAqG8n@^uXVmMkKg_L2L1K%(IVbP{vLK%tj`vaOD8{+j+C&&gAhxg-w$0$pDHXuW zN0R$kGiH^=dWek#rjt<(lzR5q<+^3-c%Xh?Q9MN!m=c|lOqtbP>D2eAHd9}&ZTt;0 zdW^?ypvH~~exlCJ{`6n3{G2>Cs+3N^ZGf?_-EDrPt*%Y*N*l{bz60OL*!o52th{U(u(3 zbK`zZmc;{mdgQaqW;Y$6dTD|p-3Uq+Li^ce}C1 z#vA;WUd!8LCkym`aiD{{M=4_-33*VfMUFjPnEhDN|;;`?;FR zC8-arC)Fe^D+V+x}5j${|cq=7H3<<9&T@q zX7vG$GL5d<{@r|^Kmd%M%4(&>C>ycoJOE+mqT3%vPXL}Bhx#KUlm}W1{KW-<54Qho z5_XcN@Nh_&nEOPnA;4Oa_U^d1eMbLZz12YdeLF?-z%6!lefk(e^zPtgq5)CZZ>qJ5 zYMkaF-@UYD} z4eF?~mhKI>?OD1u{R={PdrsVNmOf(h0C4S7y*s-QTKBl0sH~=lgiX@cNvLlo=|}T` z^KIo>N#j-lKH`)7!)q>{yUK~%w_Qd~03?dyfYEbiIXU}%>#FVY)vJx^qXTUbj_beF z=mO?#h}CB#xpij9%ky}5Vma$!=#1Jx$&i;9`2%-lo0lyUt9NkO)`VdGfVv2p?;-;v zX>G-!3=%a9m;%RJxCt;S?H-2bQPZ@!y!KKu4W8^~d*IyJA5O+Ky9QWipQ8GMPUFpY 
zNi`wH(W34yc*)cD?B(7EjbiUxpt6&{`XwvSq=*Az_~{{jVErQiuzHKP5(7nD>{?Ql z302=B4DWn84lLdTV6)p4oP9B?$ACgFhboFNgQ{;!WXC8K98I#Nx= zHa@d!rCOY}{u~XUn|m5iJT~7M0XrWs)(FxAa59cp3_nix$^v>2&Dm}Vw_iUuT;PFV zcJ>J_LRYmZx4!yQkK|o$GWkAp0g)nf#k{Z`uzqk%Dq%-_xlKh zDtySeLX*1(BYmZ_F4Fg@sHyTIhx&LYYz$&Ypv4;uUQtNn|BHP`UDBRX!4|$)t+Fji z=NdMxx0U?Bn|Hq(I-Hs0tzY?J$q9gLjos_ox(rJOXlMr~T49sap>NSOF-Hk7qJD!+ zfl9`6qqr+9u9uH#+1%ZJhb06#qN^c)%ihjftzsm>(}6HPRK(rmLOjKL<>C;u<)~-f z60-TT0fr$g44ia|U%2u~2~g!Xp(cNiH#!I++mv46r_-7?8Y(Oc6PK%qpEf3l z(6Xg5dQj(L`BB;fbBh|qOjy|!L$u_Bs1~SJORBvp{Y`u+qcJrx9>mNj!&0skJ~%V- zP&OF`p&i};IJ*g!p$)7;cC_TqO_!0lBiI+(olBl+;S zofR4j=uda)NGo2gLX?r&-2j=EwO5qu+3p#UIegBlUPryqXRi|qm)aE0C{!wsHr&`F zq=rPVT4oGtR`L&BcGkYg7X3aG5|cYXy+&O*TRrfX^~Jq}h|MX&!|>P2RmTofruj*y z-Ygr~1QeX;0d-~6CxuZwbj#~kR_h;xo#=K!(ktBK)-LgU>#=i=aT{LQ?riyjC0j$} zrmN}t=q_J-l;O!VxRwBN^ugj#H`@COb+ubcr-a;_ z870TlUA)TlAQEUrEZiAwr1YA`zB^a6Z2&}?)mmybTDV$G_xt@3`!+nKh+87`L~d23 zkjf+OGfvt@4`QH}gcI;l+1vD?$2{a;%SYUcb<#A-it*G6u96V#Vg%mFQ)n+O+h^Db z13-}^#((7b5BP>Dck_!S+1Z6^JMxh~Djrcn*SYJ4@}ElATQ7FiOFy&O<&X&x!Syg| zO*)3Y`IW7Gipw8c*$50%cj^&hPT;=RMb<1o4Sfl3=-MP@ZVlW<^Z6CD%Qv1Zcfx|0 zs10U&3q;Rvj_b{7(T7C9isdsegh*a-DvukKzEcHLOD`SLQJ6*xaML`g|s?YSBl`z zyuKWuPqo;5pzN}{J@f=!+WLM-$;goLZUUSd4NR7|nK`1eX^Klm{OdCgpS5$Myw`5I z>lP~}o>7Ouh8ov=l#0Nzy9$*@%ew%LKx}Aph{KZzs01CRmgtHERW^xkgeB05ex&pt zCf@GV0vf$q2hulE0Lp^az4gY;(ZcM5*~*{0eqO5u6v#stwg6J}g)1D0MVw?qlH7wi z2kbW)!`rpOq}2ehe9X_&b^4Jj#FJ!i6a+qav#mSN>JEADRC>W5GA}$Qzy0Zr)~`8H z%3r~hWIst?ZMOh&p?x+Q?aUf-x~punMq78jNIlY^uZPAxf$asRlRP(%UU)Tpg0w_j zS<*hh9^YCC4jBC`G(R1eixx6r-@E%{kiY%oL=64hySj3jvY_(#FeNn9PlAq22*aN4^qptZ#yL!H^1KoN&H^y-Ai3ZyzbJw?< z=Hk!Bwqh%wfZ&;T?MobfR>e0cBp=)zW~qFV{R*9@yInc*%t-&qxoj+tp)XJ2ff<@R5h zM<9+2$|T2>=-XzAhQT!C9Lr}wr9>wQ00y|U=dRfg?@1wdvus_S2LlZ^Zg8*x(G*qh zgmVKOHwqX?DekIFS?3J-_q&;y=9fZNz;=)O3frX+JDRSeGc%E+|o>17Zmdv z`fH%=4K{TmwF3-4t5ge3H%L&nMW#9KGxz3&cZ!sYo!#_xiZGbH;O!$Cx@7~d))Rt7 zeMPwndht#}c}Yqvm&veCz?l>Xr7=Yx~7lN>_e;-B}Ac6vQ5xWCg5m 
zHMxBR^)aHJy1d-BtuB*(v=}!=0F0NBu_@nzza>>uL^2&ku`c1y0KG0L0Gc_Fdv_WR zgk6XddB#RchbtoLfm2z~UDMgoPETLm;Jvl!s!%jjPF7i`?WD<%k$e zf9iDyo8?`sUEhT>r?sRO*$e%rco~m58nmyh@4ip|>jD7M=uG3sm-U8`=CVjTl;PgqosT7-1)_p<(;KN(<>h_(FQY zm|L;1zwF>qL(w>w1()R^*Ug+zK%$?_14#7I3CskuztSz(3)d%Yqrjjqv z>Lmsj{a8-0ze=ZX)>lwq6sYhzNog7$h>Uo;Ot6_<)*bZBd|Z_W9$<*bk#z;f(89>Y z`tMa|fDnkh$fsbc1eZXy>>znvb(w2FdB97?txl;pP@9kRR%jt20PFoRAlgVG4L7Do z)~r_14Gz)mqcl%Klqo4Aru5-yjONo!1jf=x{FY{n0D~|8imI&ifxI zu4@8;mbLTORTvV9ey@1(^_m#S`z6;PtxR$6iz<0qe#x>vb%9GNZLTA(VC;-ayc@YF z-#M}VMWs~8x;1jI?>N=(J}xg@00NkPdR-ndRT%}UpqfwI;c0IDp_WS1hZ3Br`et4D zMuoqi6|;MNt*W^KawX4{HdM ziK=YN z0HQap#iCSySS!_0Nvb@;(70a zFp22WKCT&Lhh3(e6?7`2-CRFt>tHZ_Dz?zAZJ56<3T8PtN=~fjU#l|>+%|`e4%0?b zP}is@F&jb_X<(z*BjT)^d2dL$r6$fx*x>p?8EBVNd6gipJ{n^^=YiWZrMo8Y2L_*5RxpHh) z`H1d{=p|eh95rON_a2vBy0@dNk_YF4Jf;)>YSbtXR^QSFcU?nuyjodtk2~7k7Ld9Y z9ebFJGrDLQI6MxiAJ8V*?T@L^=1CbW30Jpvt8$UKBHy~|enC(n`TY23s@7NSSFfU! z*0}{f&CHy=fxO;Y*X$}`Qz24mw`FZ{JouNldk32fr4L%VyuuGY^wZGhmUdi%ZA{yC z>@WZ{-b}B8G;Tv^%&k2B5hxlwz0&n0c!M_cW$(2*JxO+ZM!7D0?V9LK%D{MUVSqjE z8`l~7LrF8QF>tMq?XitIe&%TBP&ilDT1brrQa4|1&DzV=BBqVTEabI(n$2AYv35Ap z<)v0qcX2#@1s)sF$rvd{6Wr7h}I1*jiYwCG}fu>G=G>4PCuXRi67CcwrbrT03ASaP!AY z9Nhyvu5SinHs^ie-KPm1yFht&DZ{V(moU3qtgDk zQRNr2oOwjRDZvgxz#i`(8Edkb<*bqLN3cuh6Q|b8a=!3ho0*6&WxbB}8;z}7rQmP{ z7LA+Qg-6HZH#HL~JbI!>bf#>!R_Ib6(Fo&Ma{`tyzrB0M6E9bZYABa{1SHMOw(amr z6@RXaAhL&B?w7S}=0FoDz>IriL@|2vacaLW;rO`xhvo04C>zyYH9=>}&gXmdMnz-7 zfjPKzLl$fl2g;4hbJw@QH>Djv>T=8_+FBtWcX}}=AZY&med(l?K)zH9)AXGWR!goT zM01|3hVtKCqAB3r6p!uN-BkM<`7?uxGQhRS_%%33T7SS&pAb<||FmY@w{QE`bHTVe zyk0(>3zt1W;pMT3zN&6E;Lz^;z3SvAa|FKr(&~Ckf2>$)SBxZ*wHZe8J&t2L%q+PNb+||EnTz_r08U`$WD2dbjkhAQ#z{GApTF z=(85s5xh}(fm!n)x@?G5(-A74^~M!-Wh-vf~GXJ3o2gtr;($@ zn#<>P%#ykDmgc|64$i#(IQY>_aZLZ_yW`g4=k=4vas={AJ7qhhQ< z-nIPOrNd505p>~9Qr;J|N%hE{2&5%A*~x`IHxQ9mG4s$esP7f8zWs#h%~mc2QTgG? zHRYEn8<1ya^yI}gi2!`XxFD)GO%5Jgfz2dQ!kt@qQ)u7kntncF`Y2XlQn>LJVh6v)`r;&IleLCXas~U2YK9PY%{! 
z)+*}k+~PMkdE?hmW-a|^G3(J$GpCsJ>v7^&?W41+l*}YcH?$_l4 z=GGM%x{6@E(amBjRZPrv&fQPb6@9(DDx{bTBM~>lKRXaW4ltHJczoE5z1TdT_ics- z(spAR8O?Wj?EEaP-9$VSovAhq$k(iab95Pzl5sxN`HzoywZEad)Moc0kK%Y!8qPVS zns*0#BvO+KAC0g6x^ntr{N}B4E3&B7INjKy#p)Rxx4r~B&gi?jqZ8E`vQ)*iixDYo zF|IO*CBg4mB!dd2`pkozXg2k`0@*WU^#0PTL~%3GFLgdD!zZH*1+Kp9$vuNn z*}>q0Lzc;VdqFq1VQZv3Of+-h*@vqe)8m!`6<7hWa-_vp3$1M&By46swP+*dsRZ$& zg~hIGGK5`Oe$J)uvaWSq3Y5At#) zj=+khYr z`~;4_@PZR_VlVxl3*i6LoJ!hp{3-~Z8^7FUu0t2b+txZzJW`&aO?{Dnm0OPu(_|?7-_b$7=@n$S$F^?Q*WKyx% zpOfFtkh*XR?gTKI(Ku<~GB{u{uGd`&v_#MeQhH7iEMWZlU7B12&gH8nUaM!+zC48X z9af;O&uE)GaMniFm{%+T4JwyI6eUIx3xu=)BmB2_w`o6GQ9Uu`fYWeML3R<-x&*(6 zeWq06;~NC8AObx*(^ICb`9aUBj)MbtP+ul&u1Esewoz9#F9)DHY!WlSMoCxyPpMpt z4#hQwAIe$1xm4WPFqb<#WiUfBDOcwFO822I){m@1hk8;T08qS~zz;pwpalJa{&z>* zc*AlF!oIf>{-^fTJ9c}j!6$R*XMgG(7muE9nZ}~#R2?e&lX$c538(mUUmzDn|82lH zb3K_ak=A>Q=!V1tQ)Vq_SxnM9M#v5`4>z7J!t!%oheaU;s2aIruA@l z^b~V+u_GZm(kD%8Q0f8GMd!dH1jdGmMM-GxI$K8ZYc0Apz+#Rr`~!S#IC{W%JNI_i zJKM*c0FklluUd-Tqn*L1g)d2>-_Z^Apf$8W+ADV2bO3Yg26LUgYJ3&=mmXBTeBu$W zy>GRKr^mi$C<5Z1z@-Kg1-8DE8*fMGCuaZ0eC&Rk4pT`Td8)-vhPH})V(|^v^|)uu zW|gi>+c*bnhT^G3q(SX1y7nyu`Py}F)f79+R^g_G3wU$AocyC>hhrX5cRg2})4bkO z-Y5$Et$05d#5o|#z^8u;oLW%rEA4Y%DJd}WD}jW29)SRHVFqz*W_D9{0DoPM-oG%j z#eXuhFFR`)EzUoZ=VOQ*uCDLZc~rjHkd^HL@a2!vG)1Rq@cRnCg~u&?4ZzlYFb>I! 
zwkymPGK0f^dgJrP=G&{sltTnUyjV`6@N%~f5g~7N+}E@}nMaVQhMLj878j4PR|9)Y zggT6Eno}r(pA%G~sT4{e!RtOQ1<}H$#bOYyzpTt%bvK!Z;DE>B3ZZc3MB#AQ@aOoV z_d-(rMRt4F-YG8`{?}kI2Vdi^i17v>`D;BK>z*Dl=9U-Sqk#8|^(=vKH>xyn*kvif zZ%!u#Ti7vtf@|qTQ!{nSVZ9rJdEZC0$_kORI>nHyWx!HMiM~E{HkUEu0u7+4;5v*x z`ucOkgvgOs*)_m$%+SBFE84x`@o>L*BKxq z+prF9>UyNw=R>n7LD5&J=d=^iq7Zw*3o@bCuJ&d2iD~ZTf=?J(5+%y# z@IE`>bk$%}-tmA|0Wh;g0A<^DYB#kAkrS!Byw<|rL~r3zz2|l_VXTLU4OG;Y8cU|A zC|9jGw^^h&T4cw8Qte0CuGHb)y(N^7%SUYK9$QPzOer>V3&exDjVQ@-4%M*b<%MO| z-m*`R3Uh&h*eze@!mn^qs+U8Un5`BBYdLVWnRhKtZ8MI9n;M6%PBv5I*&q3s z+P7x!tJxOK68nTX8WJjyIubSRZ^W929e*;lm5NF;9hX>&H-hxRSOiv%bdYsEfr7vT zKUHbdTJ7MM6T&*Vm$USdz`k;~gn{*+#7`-;R4Q);?|g7$ebvQOPHoR*vPyk#?(#Zs zMISW18@+tS2I{|TxeeWFk_I_7N1kz8+z5ojQK&M*);&5RyM^;wl=ixd#nQpF4A z?%t7cR4%PzuI!P@cT)u7FqUQ*f*HfsC{aKTS9^6F*q;S77C3=lc_mtp&CU6xn!Z_g zr*prvY1tmoAwLsFx{!R{qJu2RxlqQvNbDghsnu;9qPD%&8Mhj#T4vi>l?>sG z3_O9XTNGBFtW$h!w8E~5#tOE2h}}OoQR?(C6Fj~%HH;$r13eaT&KqdsO?_i?VZ(GF0pP34xhE? zJyRznkNO4!uhD%o1`zjMIDLRkW^g%ERb*gQhRo%WLZ}> zwKqPL`BR1YsN?EdMgTS3RBz`O@b~Q>5{T)8W8S`_1NNp#w26^28~b24z|8>ciQJa% zO`^AqExBcKmM9AY7pLSU`(o8b%yXv?efoN<$+M>;RHnQ zsZXzI$)DnzR*n?1Z_=bl!2N3(!dmv8>e*D&JiQ?@%Oa=Gp{JuiKL2d^>g6 zl4rmiakl^c;Hw;f4s;OIi6&Iq1jw43NJ59|JU2J_rz z10<9ghwY2AtRp6he4)zdi34U5I_QswAD^$85bMIY3G6EQdgW` zYJc!0Dmrl?zmaYQXwoS#!Z6WQTsFJ!2 z+*y^#s#MDhQw|j^f#zOxP@foLhk0OBctR|3#XuZShhQmS9vPLJ1xzPB>AD0`*cqwb zAf~(lP}~9dTnM)Z5YYvg>+jCBkBthz2L15c7XKi-nO|Pk)mB}^77qMH00OQKv^hS= zeZ~$X6b-PH609K8o0cbc^N|O}Lug*q72Gusn^r)C`~vPQX`c1oR9o z-~=+-*_kZGVo9HcOmYuUWCTKD3uk(d217gBDw)#?g*R4nBa(Zxli%+4U#P}ym^Cep zfPP#0Dh^d-e+ggj>15YT=BVYw=#Oh2eO@!V7P6a9dL}8o%O^cYVlP4?PS#?bRlY)U zM&_>DvH63800~p+bLBfQAT9hL2~}sVq}ZJdOL?SwABaiKp!VX*tU+adU2bCY<4<=5 zup5(H`b)thQc&XcAxdU>nEy7VI#YU~cVGs8{lQI72(0K~^KH8Qw%tp(QW=o<(>svW z#BfNw2^eLOWR8$sCKceksG0nbUXNpayoK0__8a>nle@-pXFYXjb67gysrFA(;OXB}p}2E+*b3@kE4XH*#W<|&Dbm3+goNqn4@0hZY&~8X;lD+PP8XG z#bd*X3QiQDQsLq7alT_8g;PEe53}xDlp`!*4eRQ=)6BhY0SACSl<2m1wR}JvE+SJF 
zT?P~>2@d3P!78mTrWFx;V?NAH%MGjzh^p;+W5=?5xw74@p2`j#l;?(QH_`29THIYZZOLZ7|-J zgDcFx7o{4>! zf7tx`z=x5%n*0Y*8X9`PE^}hktx^H$R=4Fc&BuoQxxbN8?$BrcG*mOr$(X3YW&_En z0MXHW`?Gcez=7#L0S6qVt`ZSlwWYbxA*DcLss>HAjL(JCu)Am6OopO3hz4$gH58}~ z320&}*qG8iD-9Li4$Tclw+ulwT!n!R1d>fFZE`7=tdvwHxRqb3UK-FSXn623*H`(&(BVTq-d-<0zymw08bk7(Op1BV>;>AuKZb>e+AwO09S$`(?GH6r z*wHE;nXQBi6{=dbLvv1#(S%qGazEI%oxzTttgSApdR1L;;oAvnJp95TPQGA)Aw-Zi-$9|VwqAcl1vaX1Z{RDC3U{b=5smS1dR9!@(~2GmFM+2UG+;Tn!-cFJ z*?3p?2E`-H!JF^=#-pQj8rkJn=K;kq3SfLbo-$e%kGuIc+y44TyV6u*Cf>)}c=kcn ztE{yEP)YXt8Qc`qLgX4$7+tqqN0&;1%wU_ic(0s-G`PQ*NnOb}5k`_uV|h4n!|cs;0G!rwl^!jVC~rClY&X+ZE!mMm5v1 zC+g2z+Uk6PzZx;6n7Or$V&vG*nYrtLr!FkXwX8T@c|ggb-9?rj!$wtXgbY`(v*-rO z$}cJVEU5KoCGj1Z9WH4Q9pQF%#J{iFaWtqCDR8B^l<1GS;n{@0-{_f^XKTiqf+I>7 z9tZoizyrlj0Rf_C28qSHliw4&zA#1w@u!&~30JJ(o+p_9rL{q9uMUTtDSJX^$8x?( z5ekQ^p6Ix-Je(1UtV~xFvY2B+1h3)Cdom5tr4M4$EfXCn@MqI~tIPA+Px5H{dx{Fq zrx@b`a4Za_aZMZWE7gpzlZsjjQf>X2EP@yu)}21G#0e?At^ebMK}bLvJTMW6t{Vvi zGDVN+qqRA$kqO}fFKMzjI<%Pg_ zJekV}8-ebM_!R6q0zA%MY`sf&=^_@#Kx}U}Fn)%BTZx`u>$YY1`C>DLoOP(p{Os%g zmg~;)n|@#+XBLCJHJmf}(kiB8*#B|M{-4Isy?;14lg|GaH`2e~(?E6rWv>u+ofo!H zzPC`mhTkKyezfdHAy%c==xQu_{U+}$l{aLfiE`tdrF$1p#eS~=HgDD@BX%v^5>vpU zpUf#T-9nn0?));g(TJGQhR#NcVeEw9<`AzfBAtGRi6k2XFx*&M|$tW=)45s}&J%37GJp9uRC%ni&WW`IYn z?Bu`I+~DgXc&9H0-V?#^T2d3t&el~X@Aj|)499m`5XAOK<-%W@N}55yk=uRu>%Z09 zcw-C2?5#m9hb;UI7Ftr>JZ)N;-DDVbI#GGOBXjGFGhT+uxisdMo=aaBG=a-9zBMG^ zb)tdL+SLFo*ivxGkmEOiK=fu*tB;c@m{J~m84j>K{$N9fqn?`T6;JWz*YZ%5JVz;~ zhiB5!XbIv;Rzf;a$|eyjyJyZqt-sx)W?~wli|ah$Yj#LAOV#V@zH_o~f~M;DJOm-h zkeuD5Wa7@cy*(D_K-~%f4%;OAE(Ggj&el-1XcwBnM z05Ufyz%AcCipR@veS(>8t^gAEkV^G(hM=$pQ{1z=(~ zdc4ASuD12+{f?6JH-Ryi;tAZl8ke!VVT?q)fCI3!Dac%+3p|xmiK6n6E8X&=8Qwc} zlyJW|0&<;uV#9}DDbzrz+~h(7lQ*m%3!3JMcxrtJGgGrnxTOE%cW{(_h#a=kQ9F>w z1tykSNg`zpkQ2*SWp}4~8g9okm0-*;>|n^lp>mD+pD(yduuyqJ(FvVF$#_iJ;HXKEq2f3-56@_V&V#qsri};e`_zM+Ojh#3tjA zZ(L!Sf91CsqS0=F%$yi+nOj#z<`!QWrtc#o1XQMbl!nWn43*>5bgigS+&GNP`L&kK zm&ZFL|I5aEB4*XF5*{zV{xPYvs^c&on3f*vrqjkBpg7n$6yAX9dQWp!xtR50oD=2c 
zXzirFgqG;1GP)Mi`G;IE>}FEvFZJ8&`{k6R& z-ME}*AfDD(G~XiJ?@q$V567?{P&JZbcm7!0Y<{wD3|iw6IJg#O?!Bx*{4njF+qlp3 z!rg79ot=V;f$RZosr)ySzOH<+i`G7^4jt`T?`^lgzPW_Y=&VltEa+X6F~TRdKI*r` zV^lLXfEk!QB-TVRz0sd89^s@dRh!s9^2{Q=q8!$Bq%&l_zV4e;(^Q&M4AIu`BzJu7s;#7gh`aDjgH zb0;(6**jlU1r(ksVi+LG+JR4axK%vTSX6?LAsQ-c$#ag?oIS*$UR>#*_q<aUDj$&7FQ02%^*W=*h)F1+9^53<}R_g1zfp2vd`uJN9dD3YuCep@i$VW zzFNCDaL_9~b4d;Bie$VfWsF92S9to5Os+K&oT`XbuQvX{MU@jqdOhK1B}ZnleC;NSGHcf{x9l1z1y%-W$Mp6=hDqm| z!OdXytPmrT{Cc$BtGui%SnR^5ZV_Nl-@gv8O87>NhLp^7^PLZ12F#1!zD7fk#vWRW z@M2>Q*mj@2)ukg(7aQbmbF7MuMzyS|hcj$*Ae6aA3@YZD6*)C0GkJc+5#okCjyZT~WrH;3-KGkgNcr0AV{4A!AAv#oDg>F6E>?p@sJ``J}W zZ*4R<=?z<4$lu)6W&??SPYUEu_X=0-tY%&E{ed~2}lIX~bz>S+Jx2AG1Y>x#$*+Q&x#cRJyz%I}{j!T@f(F?d!|DO#ts7Qq4^* zn5_#{knHuog!+Ua=?{ec5KK-fsz9VO%vR!Jpf;(hNOltb*E6;~(CWo46Zd+hf361z zmslZW2=9LGym6a01%hBAN`Mcuj+ac3xJ;p9{xNw+f+D16=Q+L;5Dx*!$#QPO!73U`4V@s&pb{~ zhDz7HH!(^nr`V9!z?j9K_jM83#3AK77=8SN$do!eCu?My{G(3+Ahb@;K3aI>KNc!i!YCPN6u24B z*3Y3IKHI$v-e6EDqQ^YV36j)x^A`HMxx;O1jqWq-!P!sn<=qN{Oux_S!w$!K=`O;fT3at5Ae53wLUL`ZbMq_kk4jt*FF?o(=72#Sc^C_U|b7y!wnmaoNJ#5~tj)Jkn@=+Ksi z0ApCO%P~qEiKif1M)Slsz#IP}&%TPV{D6p{9lnS#Cx};?V$XBB+u|roVL7n`-l=RH zp5IC5!EP6oUzZx}Pgi%W5xCbxv_M>u`kBUwCdgz}a>Szs{Giy6K)nXw-DVh%O_kI7 zCJ{OlDOK5HCRPfK6z8{2StkT^N(=mA?KxFh*8K;}n+l59XwPwya* zD*bsVen`;rWbQ~Zxhpr+vU*FJ?xK3z{Da&MQMo=2|M*g>D@Zr2$4L*=F|+}BL@3>4 z=v*6dy;>1|kQ)N2mr+`(2-qmM8rz)olzJYd`rPcZ(hgCC^ox2p2J>DT{q{Cj#3bkW zpUt+bvpw$(fh+jM0Yg!o0cIPA?Q(!iSp4vhxh}%l%EvW!p;R>OGNGQL_-+a`l zvs(DzMhEGLP<f+q;bdXWCv1fk%9z##3tJya4*bID^JoXpIfIA z;9pkPm!ee2nZE8da68ft2OqF7rtHjG$d8JO8edEuy$GB?s1ClDKTioEDea!O+wt`^ z!IYcZGGqDc1>_V0jf#D^?~T}&X7KG$UFz}Dp(63V`}e)X--|v8;n}g+daDTYc?eVS zd73SP@Fk37-#}Np3c2=6s-C=%oNcJCT*4!|NaRu?0#+tO&#XMQanX_Y%5 z9i_tEbDxLPF8vp1@G1QS)sx+WXZ9rvz5Y-2`P^LJvoHk-TGDxU=nB)qkwCR=qDQC z>j2Q6i2J^mNdiIaX_&>5PUcH?`iNIXayFUfTPu4YQXX$tKMew!L~K4bue)rebaS{= zS2Z!ZE-Y(P`d>e)s7=1khyapR(lmUx=)|llL79E8*4)X|ZNA_F&1*$jHtg)I*t_=Za_)%P0LyxkMKt*1oK`@DZ&gD3ZZ 
z9X&@0Qol==Zd|~@Gn2Ne-g7P5HbsFm>g4E$>s&R(f1UlZc_1{a6RUrc^DrU5RP-E> z;iW@aDQw<8aA7ktMXu1Isd%hRS1Pfqglo}cZ1Xrx!5s~5Tg^IopLUhYdPhHoDZrXg z!6P-WhBifZUriy`W>u?O-!Y<7xXI@)?IIo^gj<>vpC(VUO5vEG--|q+pAc&GCxy;X zkX%!_*!TX&0=HTp4zl}@yDqpZ+*tm?>Zne6-ZmeMOln2?=r?jZonPJhM8WODN~pBA z^0h5kF3N^)8~!!7@^$@j65nxxTh}8{`aouv*KJ;D&RuvVrG74C^dj>1w^=mWJF7O^ zYrC#Vndlc&i}(mIKBqEV|K%OB_aM424F+nEvO9$A0VXsq6d>yZJHT({XzZ9ED~nGL z-D75OG(7jQYFBu!)9eSg+d&ALpA~U#d4$5cuLMxSKaq|?^wY*Krc<-oSIQ!?2NxyJJEJ_T?^2(;6_u3hV&0`dl3QluRn zEdGcnxfk4XilGAv-U!bdk8X9GH5<1+MRu8Qd`SA{|AYSU((pNL;4pK%yhRb~&=Ui{ zhO0TQ+HFo)8!PVcI&=wHK^R0nn8vkaKd_2r6#R$`#QwWQJDQlx?(||NCby~LQ;0F( ziZ=YOp2R9zx4Gd~Pnb+tjh*0`gabx zOI3(`XFQ*da1m)$d00im?E*iB1RxCM^n6;g%uU8|=+Nz0}igVepfUD_mjDAFJ zZyJ<@O?{}HoJ;pg>N=l1?l&`>iDcGvDZ1J@k$OBh`oV=Pi_F!O?aegi`hXL=YsYdv z<}1z_F=%k7Llrxf*~4ual+K|zu=qtpt3fYR=Bjh$p&hTBuJvCC$H$EYyzl=#J&W;r zHM`2EK`5$5=p;8yS}U2UYc=hDJk|P+8qr9Q<6c;QyMJiQ&`9wX$8T*o4G5!?ES(nQ z0;Fl`c}Y&=qm;a4jU7MUv50ttOIW7yqXMAg5}MK&O5`Kgo=Vc_8mvR;B{W9t zQ7)FQ>odQ|G}IK z4gFK_!fRq6n{O8r8JPXMwUHQ|)bl$pF_)78p`RSok;>D%n!Rz^cqhKb%Z5p$-5){I zPwZCsB$H=T=GWAieT-^I0CL)$~$ z=d}y{t#48H@B2EH)0{;4+H?BxQy>+<&2O8yr?qd_xFYf+ z#0$6d_(2kV4nYzRvs_}kmnr6M^Ziqh^&h2Zg=vSN+s8L#kizeMJcLRIg+nRi{b&sM}nn1aq5a@vYz1@!>8}KEnJdC zMD3qYP-TaB9R&_WifwKi3W|n%uB$~Uu%9`$|7(NgXcrzCBdnwx);nI<)RBo6Cqfr5 z%Ck=$(rsz%xqnfp=u+#sClXr5K`w-D!nNkDUwwm#VxWBO#gUFK=C4Oj3V_-cXg%SL zwqN!m!;n;$?_(8(23`jL7w9nJ`L1kh&Q^iXWV~QeK!}K zTs~rWq+Lo&Q`C3XnLN=+kSM>l@(osDPUs+{U*p+ZCSp+oD_wn7mz~J2HhoNGqq;k* zU))myc5Px%o{V3$J;1qxc4|inac+7x9C6Ui8e{fx6Gu5UhU>~D_ia*Oc6Q2DMs=Gk zN(5TY7+QB4fVx1kO9MAF1T?d))}~BLa0geEUE4vcRhCC@^6Y!5Yd2P_q=d?lpak0r zZEZH0xvcSqFZ%@0^KEZO3?04+?Ik{DKafU958?8cLkHST7b_`NhHO3d@xa|1+d@FIAHGmdliiCmynV7jTtHIlrwbDbZ=Nc;EiaPp}r>C8vS2`~q$_ccNFHY0w&~Dl@AWYOD2Z z({A0kzCf$LFL(F|Yqorhe&}|DiWN-Lamw(-DLQb>pC%BJ?Z9ZG-P)=A>3xw9WLeA! 
zu-~;8cGv#y`Z%FyveOL~9BKYY%{w7DMxiI@cU5#dl6^^e@&}@uc}5B$Xvr;#^7Te> za}Ekhd$DLU@x$MMjqJ zdw=T4?LXkmijpc&;g2IC_eZf6BUS8{_>)U|!yiOLK5_Ah^kRyy^W?@DetX5%a|KfV zu0r&}ZQK5uS5x;B^^PWdl-RuhAKBmNkAiR%pT(RCEdF!9lwB#KN-Bp7y=|ckX6`y# z(enz*aUkN_V22w$5J3APXpReflL)GG!8i zMztqSVuxB&S}{G$7d=8$e^rC>Z!9^4MDDr9;j1gH3C?qAj>Gef*t16ih1g>XPgX-u zR_{khw2WVW>3#7h&NIt}BX%OdRzLOk-FDZSuGpebG1YEy{GWS@Vc$>F!ZXK}{GxnJ zxa*O;Swn?#xfU|~e21~rIU)f{3}y`%{E`(X(H`ponDvT>aqFr!*O`fn*%w@;M2XsAKKj_%^7{B|Y zU-3mZclY`?Z&i^`AIGq(-=bBB z`sP)(D#zrVoO6zsx6Kyk6FY|K(KfhhHN=l**XF_6_j(%?_YPu~l#X0BH<>uw>GhoA#5sp8nz1pZU1T?koReCq{NON_%hK z?RfPOogvMovltE8_j1S}JK?qCPcrQlk3l_!d486&jqFgJ%`Dod1JaL_(*PJiYInuA z1=bJjF{In0;-9eGxjROmY&!=#o5F~ZDcg$YNXIO01>v6@aWN9M&f{$=mT1*|wE3Y; z5Qtkx(`*pAJJtD=bt+!2b<_1m%zNwtqSmE>?A_JK1d4l@c}FE(;xyH!(@NxrZ-8>1 ziflhKKJtboBCljDg!P{?=oyr>lZus-=~05#+SPHgO*|KRui9p@3vPam%A?bP5Y&w;S)5|edvg-<=rpM`ll1D=F!u^Yc(g+dKC>?QxWs0Ib*31hPbM+2fX(|7>V10l;wXQXz z?_Qor51CQk8o#>a598M8t#egmRad|1`sSGh+NIgv z8HJOwv+>q2lS*FGUF-!2g^#p`Xv`hqn1pX}5nKC78{$kWzul~KuKV112j=76^FFN| zJ7<3V=@i>(-nahAIa`^;7wYSF`mTJY7jHBt)`jbeoPT01U^z0r7L)$1NtuL*pmjGQ zw5fMEr@PY3LbZT#BG@i_MJ0lyWnj&2D3GUt7R_FzKpiucp}uL`lY#lip7Ti-nX|M-Cp@*Nw;8!4~BdEh=JqAxrS z_xLicV)!N>D8Es8T7OJUUs;EO{xlhc4>N^EelmUjd;uPJ_+;N=oHyUi%XnI~b#uIbyC`3w9vd)uU%Sg`@J~uwP1{*^^wnClp7>zBGQd1+7 z_NYDk8lDL*Q05NC2nF_}|JbYwj%5N8E%Tm&&KqCdP z_nEJ6&rnk+4>S`>#>}@fi1*HVWfu#bJJaTVxxU$7 zqt<&*zn<*ck3L2#`NcdVT}6tl13QA3tX0G2`?#KZ$SeOI3yG;Kberae&Hdcy2@v8>Vj zfGAqJVhbySN5fIF11AJttDnyJAgN_+9%F1Y{EO-fSTm$p*bC_K zw@E*1b8yuZ<|@T7!ex6qFWxg7wQ@Z0T);xsrt-b`*dkIseK0zue58@`@Wc(puVhs# z(2+{`RW&7>G&Pl0%Nlly(oMs{Ba#LCTUM8nP9OSWS~p@1T9WpSwgdBwEmfnzcceHg zPWS#IO4gTrcV9g3PH>#BhgT}DXRb%K=Rd|-hP`ffYv~_S?SPU#dt=W=0{w@Y%GruC zJ-{b8`B^$=NaCiSbZ=C1?m}1Nz;P1`$ghf?zFw^)V9QBp#N*MWF$=zVQNv$Su0_+q z;w`Km#$P&Z!>vDDaLjKH;4RRXyeD|0fz%{>*OpLr_Zb5VG`dzJW_2y3_TadIZzH4s$Pbd#iQG zr?)9AOOBv0Gea6ZBmL{rEj_2>OfK`+3&+^D^eO|^Gi5_FL( zv#)>1p9KuSe|=-2EU2;*5)F7Z=~>wl<3vH(_ISz+RrD 
z@WjNk=pD#^@LNN#;7Q1lDD%a_SC$D~z@;bT?dd<`!KBZZ^vU>mZj;KJ+@Es_BQzv~b<} zd#*<~bzf6(8gEm2?6ze=XqR`uDA!PAP=6yebOXYuxaOC{N|c-;h-_4{9IJoy_?tb( z;lA@f-yVm=R`wM%3n@&(-QeH4T6ZcdnFE@X;IU~3SUT}wf0O=Y@|yzEK4Sg!=w+gT zg>&oXD9r1!=l^8O`S{8^^#ZM|MNbIXse-xIj&JA9b7qxvRmcGKWb~Qt%=~XRt_j@+ z25fu&vmMk?@ZRnwVUQ$aUG=xHM8lGd8+-3`*|!g1ln<*py7$}5jzbUruGq^Cr~ZRQ zPn&T^Ph%LVd+1Pl;O3wNh!61aX}V&2hfqbjH*VywdcZz6?T819_-(u$V<(Pk{^ZF! z)M4q|iV8g-5mNzM*U(rZ>@~hmu-~5TLi(orm9*k{v3K-uH=4=JLKUcTW&QNF^UWF);j%>kP&4 z$n?K+o{)Ecghqvi%#I?<&&h_E#f$4lU0gqWfKr^b^N}mstO(*>GP&V}MtM*#r~BUC zU9RlLwW)WNX8}zCjUrh-c$JXo6y=Ck58YC}1OD;>p|74ESRQAs0o21&g#Hyj3LEEe zcGvkq@J}!IvZVIOU|dc>j>|i3G6kFZfxp)*)m2FIqn>v8+2->UtO0788!D;`ttIiI z-D;J=K3CsjD({as1<-4ozb5}r3o+alnDWr6ONlWtTl75ZuJH0<`sk%l=?x*&jo`1r zU6dx<$F{`jT%HV`(*~E9!dN3WAmFys2ma1^gLWDU>W_*8VD1=meb56`uBW2k-&ss7 zyT|zc-Gx(SaR$Q`KnKSYt;iCWNT%gBpI<0PjOlg-#Q z;KZQ<_Fv+H;D$Q>ujB2mBptUsw>L_k?bGmusJKY8X(|5$`~Pl^oDHP)o&g*f3gKRv zJzg8w|2;3=ETzw{o=lnUp1gM>t{Cz6Hc(e^ZjthLv3-5pZ{M5VUliTm-j2CZcgE^) z{ms4h_bmf`SkwIAd7}r-E}80q?N$*<_B<2}rWsA2PRDXX)ZA_Pj*CVAZO^CC@xLI& z|JtDE{`Wif|344RjNrequ?&8a`hYA9_NW-x9Rl2Uy)0o;YBd7d#85>AuKu+g$*99t z5Z>Td_)9EHRL@=!yU+g$p7{IZ=Dj`X2kq-o|K*MRpGIolq;AG4l`oa*sf|mENB+LX znfcL*rh`EnM!hg?S!pPySVWs?wl~mjU}?cs7jmCwM|g0AVwb`zf?*eML4UJ_AL<^3 z+BUl7;;FIyyH)?hfm%8FMbVZ;&4H_oH?fZ6UXJ#e$l`y1M)0 zDT;#=gav@h%KVQwN{ufWDZv=I_Lv7VbzC|NGSF8bQ4WmsX8eZly}fr1aB~JrJ_yBQ zP!Tni_wqI7cl4wvm4iiOanlhr@(w0U=8%m_y%~dudHaN35&|RZP|-zZP?YFUMRF`- z2&+)GQja!T8(xzrQ}%tLFmEJ3_~?ZW@gRAj`$ViQtl_&cr(7 zB=XLbJ9cx$b{jJ+lA3qh3F@I|R`W&Q!j2h&wA~IXJ;$x2eGeQV(sQpId&EkjTuVxL z7^kbDLDrH30rO+bN1O^7nu%%1rK**kWf#81$;#R?-Hz4F!l`z2dGhpR>Pz{Nii*;{ z81~88k4oQKEK}tx*J&|Nsdh!br&CH&Glq<1)%=j^r1jDS;$us#7>{3HoEC7`5a!J< z38Nc7z15!2%n6geiU53Fp*FNb5QS`WcY4h>q-_#79v+ke#VSf|)3FV!0 z>2V}o4SZ83dK9W00)X}u{5#w3A-$zjYU|fMom|1eQFUX<0fk0Z=|B7xB&}<;B+Z6Oe7Krf7@$&Y2o4taw^3yH8**ESu^aa^Zz`e(|r z7&h*XAJoIPvI|=4(>*!AU|5&08;f2mYRH6UI~NZwj4D2bZ|NN`A5E`Oo+tp>Mg*cW zvqCZN7ySE;%XyCAO-2JH%H-GBxe8z?vqtT&68j^2{EA&@h`CgGARLSrRUJpZTAqt@ 
zM=x{;02=BBLzgwmvryons3y$(6OUvm)B3fX@SSdVQ@g~x-$7ts0>9{hZmcb}{h;Lg z)$a^HVQrluo)+jjx?W6bLa*^>`b)~;6KY&iYPR0EO5l%h`1*Q10`N z(W0(^9yauu29)_`-~gxdGuR`j&WM>LGN{`MD1~_|b#?mnT=KJqshE`ycFF*}cxo3Y z9@)RC-sMqg0}wWxOLo}3cq0f4;140K?&t{Lb?R0tK!p2(w1$+ic1_n!Ve6$U`kOSY zUH%IJ`R)0oleuP2{&=dJRpDD;OxY6iy7P--x-ax$e5-#aHefs3SnRX>#(FsX&S;a? zBY(~>TgVncsxUvBZZ4(~ZW{XJPV-2RXE^n9g`|L94`idtR?Tr`FkvX;x8fI;bzmYS z>tA98)!YN5FXc=I!+%24E24l_bnM!6MiY58_QLc#cy-5}k%OyhtM{jgCP;QlxHkg2 znql-QmhSqt*u*PFnlqlG({l>$-+ks!MlCNrY>;%OU&nY}RH)m8+lj^Eb7tk6k%-3# zrL3s(9{068oAzy@CMC_MdUlnl{}uX{w@g2c7+S_NU%7IKwYT3L-Q#lDxRdp%OE8oA z78}A>z3nTvyoWFu)hPz{p@f6^M^^pZk+wFL@K|8{AQRnNp)qtsDsh+PD^Mj&)Ur$K z?FIu))84J#qRDFHj^KCoR4K9p@2(P6ptmn6FrM6l05I*g z9?2%|#P;7AkoQ{Ahq!yX+AWRhXhP}ze{gZ4>%fa0&y&X`ZnrFX;wT)9!RVd>aO0)s)aX7xP~=abS?7QNLmKROEKOPs1u zaZWADRkJFce)cxYI(%>}9qU~>ZE99t)h;1FGOcS z8h|naq|1r9Q4z~h9Z2G!kP>e$MBtejL{IifY>@En*#L_)edQfu+lR`Py&2Bin_;Yn zsv6e%%Pg1Be z)`GKM&$P-}HIgDcOs77*nR^5HR7Av# z*M`T;er-F_Cnl3znC?W8aT=uS_;{@K`goxCMM&*Zmt7>eFOLoRLzp zjk;=jQOv1K%8;%GitgxIWb_zHEGeC!N0oU#PqV+JXuLC$d%IJC_RtwrSl`sYrDT#M z`#5O|Wt8ARF-vN)`7%MgM-LPpm_wBXOai?EsK0KWopz<`)Eb6)cz6m0Tw7;whterY zU~d%h=XfCqf*YItG*`XL;3tdlkwT2!W*Z3%jqApSV|@?UW22S>B8z8R4Ot1ZHY8z%{D|o=e64c#!PFKA{d|7^RRH~7 zrruYlT{V9*Q#fKULaFc#ay6^Yx7V}{O^;zQK!3WDBMrHB?hIc@$@`xv{#1Xh^Ifot zVu4@pW-DR4&y%KOXYx4S`UCm-O=U2sfJK=1f$2JL1`KXuV>=ZfK$*mvg{~IzHpBRqFN!kWo5Q)yo()CB^_;a$pM;F&jy3yMv1Y6T-8v(i89CS=zGe^RN7d6;@yz<0F= zSX)4l1wy2T9Nv^N0hwugC3gR=Lu<{GWrIdV5tZ`6Sry>)FB$Z5K(rjM(N}byhiR6s z*#cHgfd&r^n>p2lYq8ff>FfzdcNxV`epSYO~}YUa298gBFq?%J2YkIQcda%a=f73)rQcX#tJ^y*1mrrErYkxe@@NAT%sDJP z0Q{j))>stL*iBT}iWC$e{BWo3-=51HDD#+|UQX#QM&@Tl*g+O@@maCa4=KlTVquz(o*nNT% zud0c&Kl&2%yZfI%7h1g*?{Igob)Cls Lk18M9zy99<5dF(@ literal 0 HcmV?d00001 diff --git a/dev/sas_token.md b/dev/sas_token.md new file mode 100644 index 00000000..48c23148 --- /dev/null +++ b/dev/sas_token.md @@ -0,0 +1,3 @@ +# SAS token settings + +![Alt text](image.png) diff --git a/notebooks/azure-blob.ipynb 
b/notebooks/azure-blob.ipynb new file mode 100644 index 00000000..ab911574 --- /dev/null +++ b/notebooks/azure-blob.ipynb @@ -0,0 +1,162 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from os import environ\n", + "from dotenv import load_dotenv\n", + "import datetime\n", + "import requests\n", + "import base64\n", + "import hmac\n", + "import hashlib\n", + "from pprint import pprint\n", + "\n", + "# Load environment variables from .env\n", + "load_dotenv(\".env\")\n", + "\n", + "if not environ.get(\"AZURE_STORAGE_ACCOUNT_NAME\"):\n", + " raise ValueError(\"AZURE_STORAGE_ACCOUNT_NAME is not set\")\n", + "\n", + "AZURE_STORAGE_ACCOUNT_NAME = environ.get(\"AZURE_STORAGE_ACCOUNT_NAME\")\n", + "\n", + "if not environ.get(\"AZURE_ACCESS_KEY\"):\n", + " raise ValueError(\"AZURE_ACCESS_KEY is not set\")\n", + "\n", + "AZURE_ACCESS_KEY = environ.get(\"AZURE_ACCESS_KEY\")\n", + "\n", + "if not environ.get(\"AZURE_SAS_TOKEN\"):\n", + " raise ValueError(\"AZURE_SAS_TOKEN is not set\")\n", + "\n", + "AZURE_SAS_TOKEN = environ.get(\"AZURE_SAS_TOKEN\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# connection via SAS token\n", + "\n", + "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", + "storage_account_sas_token = AZURE_SAS_TOKEN\n", + "api_version = \"2023-08-03\"\n", + "request_time = datetime.datetime.utcnow().strftime(\"%a, %d %b %Y %H:%M:%S GMT\")\n", + "\n", + "containerName = \"ucsd-pilot\"\n", + "\n", + "url = f\"https://{storage_account_name}.dfs.core.windows.net/{containerName}?directory=Cirrus/4001&recursive=false&resource=filesystem&{storage_account_sas_token}\"\n", + "# url = f\"https://{storage_account_name}.dfs.core.windows.net/{folderName}?recursive=false&resource=filesystem&prefix=CGM%2F&delimiter=%2F&{storage_account_sas_token}\"\n", + "\n", + "headers = {\n", + " \"x-ms-date\": request_time,\n", + " 
\"x-ms-version\": api_version,\n", + "}\n", + "\n", + "response = requests.get(url, headers=headers)\n", + "# print(response.text)\n", + "pprint(response.json())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# connection via shared key\n", + "\n", + "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", + "storage_account_key = AZURE_ACCESS_KEY\n", + "api_version = '2023-08-03'\n", + "request_time = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')\n", + "\n", + "\n", + "string_params = {\n", + " 'Verb': 'GET',\n", + " 'Content-Encoding': '',\n", + " 'Content-Language': '',\n", + " 'Content-Length': '',\n", + " 'Content-MD5': '',\n", + " 'Content-Type': '',\n", + " 'Date': '',\n", + " 'If-Modified-Since': '',\n", + " 'If-Match': '',\n", + " 'If-None-Match': '',\n", + " 'If-Unmodified-Since': '',\n", + " 'Range': '',\n", + " 'CanonicalizedHeaders': 'x-ms-date:' + request_time + '\\nx-ms-version:' + api_version + '\\n',\n", + " 'CanonicalizedResource': '/' + storage_account_name + '/\\ncomp:properties\\nrestype:service'\n", + "}\n", + "\n", + "string_to_sign = (string_params['Verb'] + '\\n'\n", + " + string_params['Content-Encoding'] + '\\n'\n", + " + string_params['Content-Language'] + '\\n'\n", + " + string_params['Content-Length'] + '\\n'\n", + " + string_params['Content-MD5'] + '\\n'\n", + " + string_params['Content-Type'] + '\\n'\n", + " + string_params['Date'] + '\\n'\n", + " + string_params['If-Modified-Since'] + '\\n'\n", + " + string_params['If-Match'] + '\\n'\n", + " + string_params['If-None-Match'] + '\\n'\n", + " + string_params['If-Unmodified-Since'] + '\\n'\n", + " + string_params['Range'] + '\\n'\n", + " + string_params['CanonicalizedHeaders']\n", + " + string_params['CanonicalizedResource'])\n", + "\n", + "def _sign_string(key, string_to_sign):\n", + " key = base64.b64decode(key.encode('utf-8'))\n", + " string_to_sign = string_to_sign.encode('utf-8')\n", + " 
signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)\n", + " digest = signed_hmac_sha256.digest()\n", + " encoded_digest = base64.b64encode(digest).decode('utf-8')\n", + " return encoded_digest\n", + "\n", + "# signed_string = base64.b64encode(hmac.new(base64.b64decode(storage_account_key), msg=string_to_sign.encode('utf-8'), digestmod=hashlib.sha256).digest()).decode('utf-8')\n", + "# print(signed_string)\n", + "\n", + "signed_string = _sign_string(storage_account_key, string_to_sign=string_to_sign)\n", + "\n", + "headers = {\n", + " 'x-ms-date' : request_time,\n", + " 'x-ms-version' : api_version,\n", + " # 'Content-Length': \"\",\n", + " 'Authorization' : f\"SharedKey {storage_account_name}:{signed_string}\"\n", + "}\n", + "\n", + "dns_suffix = 'dfs.core.windows.net'\n", + "folderName = 'logging'\n", + "\n", + "url = f'https://{storage_account_name}.{dns_suffix}/{folderName}?resource=filesystem'\n", + "\n", + "response = requests.get(url, headers=headers)\n", + "pprint(response.json())" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "fairhub-api-dev-env", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/poetry.lock b/poetry.lock index 6e4a0d47..e322ea82 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2660,7 +2660,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2668,15 +2667,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2693,7 +2685,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = 
"PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2701,7 +2692,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3272,6 +3262,18 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +[[package]] +name = "types-python-dateutil" +version = "2.8.19.14" +description = "Typing stubs for python-dateutil" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, + {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, +] + [[package]] name = "types-requests" version = "2.31.0.2" @@ -3576,4 +3578,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" 
python-versions = "^3.8.16" -content-hash = "266d48f06cb7dac297184d0390f8d4e3404d9f60a2df4a68a4ff81adddf35d5f" +content-hash = "e3e1b2d0645e5cd7ad0281091d65e85b411eab0ddd7c475762e908bf9c10bdb4" diff --git a/pyproject.toml b/pyproject.toml index ca68dfba..2dee3a88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,6 +89,9 @@ poethepoet = "^0.20.0" jupyter = "^1.0.0" flake8 = "^6.0.0" +# Types +types-python-dateutil = "^2.8.19.14" + # Environment [tool.poe.tasks] From f6ff59660a24cb068d3e8874e3325f35a760e430 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 1 Nov 2023 16:29:58 -0700 Subject: [PATCH 328/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20read=20env=20into?= =?UTF-8?q?=20config=20for=20endpoints?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- alembic/env.py | 2 +- apis/authentication.py | 46 ++++++++++++++++-------------- config.py | 13 ++++----- model/invited_study_contributor.py | 2 +- notebooks/azure-blob.ipynb | 17 ++++++----- pytest_config.py | 11 +++---- 6 files changed, 46 insertions(+), 45 deletions(-) diff --git a/alembic/env.py b/alembic/env.py index 46675a44..af6903a6 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -12,7 +12,7 @@ config = context.config section = config.config_ini_section -print(section, environ.get("FAIRHUB_DATABASE_URL")) + config.set_section_option( section, "FAIRHUB_DATABASE_URL", str(environ.get("FAIRHUB_DATABASE_URL")) ) diff --git a/apis/authentication.py b/apis/authentication.py index e48179bf..b3ad0cff 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -1,9 +1,10 @@ +"""This module is used to authenticate users to the system and +handle few authentication errors. 
Also, it sets token for logged user +along with expiration date""" import datetime import importlib import os import re - -# import config import uuid from datetime import timezone from typing import Any, Union @@ -37,11 +38,16 @@ class UnauthenticatedException(Exception): + """Exception raised when a user is not authenticated.""" + + # TODO: Implement this exception pass @api.route("/auth/signup") class SignUpUser(Resource): + """SignUpUser class is used to sign up new users to the system""" + @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(signup_model) @@ -144,6 +150,8 @@ def validate_password(instance): @api.route("/auth/login") class Login(Resource): + """Login class is used to login users to the system""" + @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(login_model) @@ -156,9 +164,8 @@ def post(self): email_address = data["email_address"] def validate_is_valid_email(instance): - print("within is_valid_email") email_address = instance - print(email_address) + try: validate_email(email_address) return True @@ -352,16 +359,10 @@ def is_granted(permission: str, study=None): return permission in role[contributor.permission] -# -# def is_study_metadata(study_id: int): -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not delete study", 403 -# - - @api.route("/auth/logout") class Logout(Resource): + """Logout class is used to log out users from the system""" + @api.response(200, "Success") @api.response(400, "Validation Error") def post(self): @@ -379,14 +380,15 @@ def post(self): return resp -@api.route("/auth/current-users") -class CurrentUsers(Resource): - """function is used to see all logged users in - the system. For now, it is used for testing purposes""" +# @api.route("/auth/current-users") +# class CurrentUsers(Resource): +# """function is used to see all logged users in +# the system. 
For now, it is used for testing purposes""" - @api.response(200, "Success") - @api.response(400, "Validation Error") - def get(self): - if not g.user: - return None - return g.user.to_dict() +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def get(self): +# """returns all logged users in the system""" +# if not g.user: +# return None +# return g.user.to_dict() diff --git a/config.py b/config.py index 9eac5d19..54aa11f4 100644 --- a/config.py +++ b/config.py @@ -1,12 +1,11 @@ """Configuration for the application.""" -from os import environ -from dotenv import load_dotenv +from dotenv import dotenv_values # Load environment variables from .env -load_dotenv(".env") +config = dotenv_values(".env") -FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") -FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") +FAIRHUB_DATABASE_URL = config.get("FAIRHUB_DATABASE_URL") +FAIRHUB_SECRET = config.get("FAIRHUB_SECRET") -FAIRHUB_AZURE_READ_SAS_TOKEN = environ.get("FAIRHUB_AZURE_READ_SAS_TOKEN") -FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = environ.get("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") +FAIRHUB_AZURE_READ_SAS_TOKEN = config.get("FAIRHUB_AZURE_READ_SAS_TOKEN") +FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = config.get("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index dd4279e9..99b8ef24 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,7 +1,7 @@ import datetime +import random import uuid -import random from .db import db from .study import Study diff --git a/notebooks/azure-blob.ipynb b/notebooks/azure-blob.ipynb index ab911574..a05962eb 100644 --- a/notebooks/azure-blob.ipynb +++ b/notebooks/azure-blob.ipynb @@ -6,8 +6,7 @@ "metadata": {}, "outputs": [], "source": [ - "from os import environ\n", - "from dotenv import load_dotenv\n", + "from dotenv import dotenv_values\n", "import datetime\n", "import requests\n", "import base64\n", @@ -16,22 
+15,22 @@ "from pprint import pprint\n", "\n", "# Load environment variables from .env\n", - "load_dotenv(\".env\")\n", + "config = dotenv_values(\".env\")\n", "\n", - "if not environ.get(\"AZURE_STORAGE_ACCOUNT_NAME\"):\n", + "if not config.get(\"AZURE_STORAGE_ACCOUNT_NAME\"):\n", " raise ValueError(\"AZURE_STORAGE_ACCOUNT_NAME is not set\")\n", "\n", - "AZURE_STORAGE_ACCOUNT_NAME = environ.get(\"AZURE_STORAGE_ACCOUNT_NAME\")\n", + "AZURE_STORAGE_ACCOUNT_NAME = config.get(\"AZURE_STORAGE_ACCOUNT_NAME\")\n", "\n", - "if not environ.get(\"AZURE_ACCESS_KEY\"):\n", + "if not config.get(\"AZURE_ACCESS_KEY\"):\n", " raise ValueError(\"AZURE_ACCESS_KEY is not set\")\n", "\n", - "AZURE_ACCESS_KEY = environ.get(\"AZURE_ACCESS_KEY\")\n", + "AZURE_ACCESS_KEY = config.get(\"AZURE_ACCESS_KEY\")\n", "\n", - "if not environ.get(\"AZURE_SAS_TOKEN\"):\n", + "if not config.get(\"AZURE_SAS_TOKEN\"):\n", " raise ValueError(\"AZURE_SAS_TOKEN is not set\")\n", "\n", - "AZURE_SAS_TOKEN = environ.get(\"AZURE_SAS_TOKEN\")" + "AZURE_SAS_TOKEN = config.get(\"AZURE_SAS_TOKEN\")" ] }, { diff --git a/pytest_config.py b/pytest_config.py index a6173df4..1d649ee0 100644 --- a/pytest_config.py +++ b/pytest_config.py @@ -1,12 +1,13 @@ """Configuration for testing the application.""" -from os import environ -from dotenv import load_dotenv +from dotenv import dotenv_values # Load environment variables from .env -load_dotenv(".env") +config = dotenv_values(".env") class TestConfig: - FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") - FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") + """Configuration for testing the application.""" + + FAIRHUB_DATABASE_URL = config.get("FAIRHUB_DATABASE_URL") + FAIRHUB_SECRET = config.get("FAIRHUB_SECRET") TESTING = True From fb3fb5cace0ebc1c41c9914afad862ec4a889831 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 1 Nov 2023 17:05:31 -0700 Subject: [PATCH 329/505] =?UTF-8?q?=F0=9F=92=9A=20fix:=20update=20configs?= =?UTF-8?q?=20for=20ci?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 9 ++++++++- config.py | 21 +++++++++++++++++---- pytest_config.py | 14 ++++++++++++-- 3 files changed, 37 insertions(+), 7 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index b3ad0cff..eec8339b 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -56,7 +56,14 @@ def post(self): """signs up the new users and saves data in DB""" data: Union[Any, dict] = request.json if os.environ.get("FLASK_ENV") != "testing": - if data["email_address"] not in ["test@fairhub.io"]: + bypassed_emails = [ + "test@fairhub.io", + "bpatel@fairhub.io", + "sanjay@fairhub.io", + "aydan@fairhub.io", + ] + + if data["email_address"] not in bypassed_emails: invite = model.StudyInvitedContributor.query.filter_by( email_address=data["email_address"] ).one_or_none() diff --git a/config.py b/config.py index 54aa11f4..51a7e8c4 100644 --- a/config.py +++ b/config.py @@ -1,11 +1,24 @@ """Configuration for the application.""" +from os import environ +from pathlib import Path from dotenv import dotenv_values +# Check if `.env` file exists +env_path = Path(".") / ".env" + +LOCAL_ENV_FILE = env_path.exists() + # Load environment variables from .env config = dotenv_values(".env") -FAIRHUB_DATABASE_URL = config.get("FAIRHUB_DATABASE_URL") -FAIRHUB_SECRET = config.get("FAIRHUB_SECRET") -FAIRHUB_AZURE_READ_SAS_TOKEN = config.get("FAIRHUB_AZURE_READ_SAS_TOKEN") -FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = config.get("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") +def get_env(key): + """Return environment variable from .env or native environment.""" + return config.get(key) if LOCAL_ENV_FILE else environ.get(key) + + +FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") +FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") + +FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") +FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = 
get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") diff --git a/pytest_config.py b/pytest_config.py index 1d649ee0..9336ba61 100644 --- a/pytest_config.py +++ b/pytest_config.py @@ -1,13 +1,23 @@ """Configuration for testing the application.""" +from os import environ from dotenv import dotenv_values # Load environment variables from .env config = dotenv_values(".env") +IN_CI_ENV = environ.get("CI") + + +def get_env(key): + """Return environment variable from .env or native environment.""" + return environ.get(key) if IN_CI_ENV else config.get(key) + class TestConfig: """Configuration for testing the application.""" - FAIRHUB_DATABASE_URL = config.get("FAIRHUB_DATABASE_URL") - FAIRHUB_SECRET = config.get("FAIRHUB_SECRET") + # Load from native environment variables if running in CI environment + FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") + FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") + TESTING = True From 776e7b6e74615d424fdabf240e3429b737bc6ab8 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Wed, 1 Nov 2023 17:09:27 -0700 Subject: [PATCH 330/505] =?UTF-8?q?=E2=9C=A8=20feat:=20redcap=20endpoints?= =?UTF-8?q?=20done?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 6 +- apis/redcap.py | 404 ++++++++++++--- app.py | 25 +- model/study.py | 3 - model/study_metadata/study_redcap.py | 75 --- model/study_redcap_project_api.py | 3 + poetry.lock | 718 ++++++++++++--------------- sql/init.sql | 5 +- sql/init_timezones.sql | 5 +- sql/specific_tables.sql | 12 +- 10 files changed, 691 insertions(+), 565 deletions(-) delete mode 100644 model/study_metadata/study_redcap.py diff --git a/apis/authentication.py b/apis/authentication.py index 4e68bc2c..d38a8a59 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -103,7 +103,8 @@ def validate_password(instance): # Schema validation schema = { "type": "object", - "required": ["email_address", "password", "code"], + # "required": ["email_address", 
"password", "code"], + "required": ["email_address", "password"], "additionalProperties": False, "properties": { "email_address": {"type": "string", "format": "valid_email"}, @@ -111,7 +112,7 @@ def validate_password(instance): "type": "string", "format": "password", }, - "code": {"type": "string"}, + # "code": {"type": "string"}, }, } @@ -182,7 +183,6 @@ def validate_is_valid_email(instance): format_checker = FormatChecker() format_checker.checks("valid email")(validate_is_valid_email) - try: validate(instance=data, schema=schema, format_checker=format_checker) except ValidationError as e: diff --git a/apis/redcap.py b/apis/redcap.py index b7d2449c..6dc232ed 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -2,7 +2,7 @@ from typing import Any, Union from flask import request -from flask_restx import Namespace, Resource, fields +from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate import model @@ -17,8 +17,9 @@ "study_id": fields.String(required=True), "project_title": fields.String(required=True), "project_id": fields.String(required=True), - "project_api_token": fields.String(required=True), + "project_api_key": fields.String(required=True), "project_api_url": fields.String(required=True), + "project_api_active": fields.Boolean(required=True), }, ) @@ -32,6 +33,9 @@ "dashboard_name": fields.String( required=True, readonly=True, description="REDCap dashboard name" ), + "dashboard_modules": fields.String( + required=True, readonly=True, description="REDCap dashboard name" + ), "report_ids": fields.String( required=True, readonly=True, description="REDCap project report IDs" ), @@ -39,11 +43,11 @@ ) -@api.route("/study//redcap") -class RedcapProjectAPI(Resource): +@api.route("/study//redcap/all") +class RedcapProjectAPIs(Resource): """Study Redcap Metadata""" - @api.doc("redcap_project_api") + @api.doc("redcap_project_apis") @api.response(200, "Success") @api.response(400, "Validation Error") 
@api.marshal_with(redcap_project_api_model, as_list=True) @@ -58,37 +62,93 @@ def get(self, study_id: int): redcap_project_api.to_dict() for redcap_project_api in redcap_project_apis ] - @api.doc("redcap_project_api") + +project_parser = reqparse.RequestParser().add_argument( + "project_id", type=str, help="REDCap project ID (pid)" +) + + +@api.route("/study//redcap") +class RedcapProjectAPI(Resource): + @api.doc(parser=project_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_api_model) - def put(self, study_id: int): - study = model.Study.query.get(study_id) + def get(self, study_id: int): + study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 - data: Union[Any, dict] = request.json - update_redcap_project_api = model.StudyRedcapProjectApi.query.get( - data["project_id"] + project_id = project_parser.parse_args()["project_id"] + redcap_project_api = model.db.session.query(model.StudyRedcapProjectApi).get( + project_id ) - update_redcap_project_api.update(data) - model.db.session.commit() - return update_redcap_project_api.to_dict() + return redcap_project_api.to_dict(), 201 - @api.doc("redcap_project_api") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_api_model) - def delete(self, study_id: int): - """Delete study redcap metadata""" + def put(self, study_id: int): + """Update study redcap""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify", 403 + # Schema validation data: Union[Any, dict] = request.json - redcap_project_api = model.StudyRedcapProjectApi.query.get(data["project_id"]) - model.db.session.delete(redcap_project_api) - model.db.session.commit() + schema = { + "type": "object", + "additionalProperties": False, + 
"required": [ + "project_title", + "project_id", + "project_api_url", + "project_api_key", + "project_api_active", + ], + "properties": { + "project_title": {"type": "string", "minLength": 1}, + "project_id": {"type": "string", "minLength": 5}, + "project_api_url": {"type": "string", "minLength": 1}, + "project_api_key": {"type": "string", "minLength": 32}, + "project_api_active": {"type": "boolean"}, + }, + } + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 - return 204 + if len(data["project_title"]) < 1: + return ( + f"redcap project_title is required for redcap access: {data['project_title']}", + 400, + ) + if len(data["project_id"]) < 1: + return ( + f"redcap project_id is required for redcap access: {data['project_id']}", + 400, + ) + if len(data["project_api_url"]) < 1: + return ( + f"redcap project_api_url is required for redcap access: {data['project_api_url']}", + 400, + ) + if len(data["project_api_key"]) < 1: + return ( + f"redcap project_api_key is required for redcap access: {data['project_api_key']}", + 400, + ) + if type(data["project_api_active"]) != bool: + return ( + f"redcap project_api_active is required for redcap access: {data['project_api_active']}", + 400, + ) + + update_study_redcap_project_api = model.StudyRedcapProjectApi.query.get( + data["project_id"] + ) + update_study_redcap_project_api.update(data) + model.db.session.commit() + return update_study_redcap_project_api.to_dict() @api.route("/study//redcap/add") @@ -103,56 +163,268 @@ def post(self, study_id: int): return "Access denied, you can not modify", 403 # Schema validation data: Union[Any, dict] = request.json - # schema = { - # "type": "object", - # "additionalProperties": False, - # "required": [ - # "project_title", - # "project_id", - # "project_api_url", - # "project_api_token", - # ], - # "properties": { - # "project_title": {"type": "string", "minLength": 1}, - # "project_id": {"type": "string", "minLength": 5}, - # 
"project_api_url": {"type": "string", "minLength": 1}, - # "project_api_token": {"type": "string", "minLength": 32}, - # }, - # } - - # try: - # validate(request.json, schema) - # except ValidationError as e: - # return e.message, 400 - - # if len(data["project_title"]) < 1: - # return ( - # f"redcap project_title is required for redcap access: {data['project_title']}", - # 400, - # ) - # if len(data["redcap_project_id"]) < 1: - # return ( - # f"redcap project_id is required for redcap access: {data['project_id']}", - # 400, - # ) - # if len(data["redcap_api_url"]) < 1: - # return ( - # f"redcap project_api_url is required for redcap access: {data['project_api_url']}", - # 400, - # ) - # if len(data["project_api_token"]) < 1: - # return ( - # f"redcap project_api_token is required for redcap access: {data['project_api_token']}", - # 400, - # ) - print("data", data) + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "project_title", + "project_id", + "project_api_url", + "project_api_key", + "project_api_active", + ], + "properties": { + "project_title": {"type": "string", "minLength": 1}, + "project_id": {"type": "string", "minLength": 5}, + "project_api_url": {"type": "string", "minLength": 1}, + "project_api_key": {"type": "string", "minLength": 32}, + "project_api_active": {"type": "boolean"}, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + if len(data["project_title"]) < 1: + return ( + f"redcap project_title is required for redcap access: {data['project_title']}", + 400, + ) + if len(data["project_id"]) < 1: + return ( + f"redcap project_id is required for redcap access: {data['project_id']}", + 400, + ) + if len(data["project_api_url"]) < 1: + return ( + f"redcap project_api_url is required for redcap access: {data['project_api_url']}", + 400, + ) + if len(data["project_api_key"]) < 1: + return ( + f"redcap project_api_key is required for redcap access: 
{data['project_api_key']}", + 400, + ) + if type(data["project_api_active"]) != bool: + return ( + f"redcap project_api_active is required for redcap access: {data['project_api_active']}", + 400, + ) + add_redcap_project_api = model.StudyRedcapProjectApi.from_data(study, data) model.db.session.add(add_redcap_project_api) model.db.session.commit() - print("redcap_project_api", add_redcap_project_api.to_dict()) return add_redcap_project_api.to_dict(), 201 +@api.route("/study//redcap/delete") +class DeleteRedcapProjectAPI(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_api_model) + def post(self, study_id: int): + """Delete study redcap metadata""" + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not delete study", 403 + data: Union[Any, dict] = request.json + delete_redcap_project_api = model.StudyRedcapProjectApi.query.filter_by( + project_id=data["project_id"] + ).delete() + model.db.session.commit() + return 204 + + +# @api.route("/study//redcap/") +# @api.expect(study_project_parser) +# class RedcapProjectAPI(Resource): +# """Study Redcap Metadata""" + +# @api.doc("redcap_project_api") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") + +# @api.marshal_with(redcap_project_api_model) +# def get(self, study_id: int, project_id: str): +# print(study_id, project_id) +# study = model.Study.query.get(study_id) +# if is_granted("redcap_access", study): +# return "Access denied, you can not modify", 403 +# data: Union[Any, dict] = request.json +# redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) +# print(redcap_project_api.to_dict()) +# return redcap_project_api.to_dict() + +# @api.doc("redcap_project_api") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_api_model) +# def delete(self, study_id: int, project_id: str): +# 
"""Delete study redcap metadata""" +# study = model.Study.query.get(study_id) +# if is_granted("redcap_access", study): +# return "Access denied, you can not delete study", 403 +# data: Union[Any, dict] = request.json +# delete_redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) +# model.db.session.delete(delete_redcap_project_api) +# model.db.session.commit() +# return 204 + + +# @api.route("/study//redcap//edit") +# @api.expect(study_project_parser) +# class EditRedcapProjectAPI(Resource): +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_api_model) +# def put(self, study_id: int, project_id: str): +# """Update study redcap""" +# study = model.Study.query.get(study_id) +# if is_granted("redcap_access", study): +# return "Access denied, you can not modify", 403 +# # Schema validation +# data: Union[Any, dict] = request.json +# schema = { +# "type": "object", +# "additionalProperties": False, +# "required": [ +# "project_title", +# "project_id", +# "project_api_url", +# "project_api_key", +# "project_api_active", +# ], +# "properties": { +# "project_title": {"type": "string", "minLength": 1}, +# "project_id": {"type": "string", "minLength": 5}, +# "project_api_url": {"type": "string", "minLength": 1}, +# "project_api_key": {"type": "string", "minLength": 32}, +# "project_api_active": {"type": "boolean"}, +# }, +# } + +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 + +# if len(data["project_title"]) < 1: +# return ( +# f"redcap project_title is required for redcap access: {data['project_title']}", +# 400, +# ) +# if len(data["project_id"]) < 1: +# return ( +# f"redcap project_id is required for redcap access: {data['project_id']}", +# 400, +# ) +# if len(data["project_api_url"]) < 1: +# return ( +# f"redcap project_api_url is required for redcap access: {data['project_api_url']}", +# 400, +# ) +# if len(data["project_api_key"]) < 1: +# 
return ( +# f"redcap project_api_key is required for redcap access: {data['project_api_key']}", +# 400, +# ) +# if type(data["project_api_active"]) != bool: +# return ( +# f"redcap project_api_active is required for redcap access: {data['project_api_active']}", +# 400, +# ) + +# update_study_redcap_project_api = model.StudyRedcapProjectApi.query.get(data["project_id"]) +# update_study_redcap_project_api.update(data) +# model.db.session.commit() +# return update_study_redcap_project_api.to_dict() + + +@api.route("/study//redcap/dashboards") +class RedcapProjectDashboards(Resource): + """Study Redcap Metadata""" + + @api.doc("redcap_project_dashboards") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model, as_list=True) + def get(self, study_id: int): + """Get study redcap""" + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + # redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.all(study) + redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( + study=study + ) + return [ + redcap_project_dashboard.to_dict() + for redcap_project_dashboard in redcap_project_dashboards + ] + + +@api.route("/study//redcap/dashboard") +class RedcapProjectDashboard(Resource): + """Study Redcap Metadata""" + + @api.doc("redcap_project_dashboard") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def get(self, study_id: int, project_id: str, dashboard_id: str): + """Get study redcap""" + study = model.Study.query.get(study_id) + study_redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) + study_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( + dashboard_id + ) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + # redcap_project_dashboards = 
model.StudyRedcapProjectDashboard.query.all(study) + redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( + study=study, + study_redcap_project_api=study_redcap_project_api, + study_redcap_project_dashboard=study_redcap_project_dashboard, + ) + return redcap_project_dashboard.to_dict() + + @api.doc("redcap_project_dashboard") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def put(self, study_id: int): + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + data: Union[Any, dict] = request.json + update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( + data["project_id"] + ) + update_redcap_project_dashboard.update(data) + model.db.session.commit() + return update_redcap_project_dashboard.to_dict() + + @api.doc("redcap_project_dashboard") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def delete(self, study_id: int): + """Delete study redcap metadata""" + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not delete study", 403 + data: Union[Any, dict] = request.json + redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( + data["project_id"] + ) + model.db.session.delete(redcap_project_dashboard) + model.db.session.commit() + + return 204 + + # @api.route("/study//redcap/") # class RedcapUpdate(Resource): # @api.doc("redcap") diff --git a/app.py b/app.py index fcacb756..55ddcb6c 100644 --- a/app.py +++ b/app.py @@ -132,19 +132,17 @@ def create_app(config_module=None): # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) - # - # @app.cli.command("create-schema") - # def create_schema(): - # engine = model.db.session.get_bind() - # metadata = MetaData() - # metadata = MetaData() - # 
metadata.reflect(bind=engine) - # table_names = [table.name for table in metadata.tables.values()] - # print(table_names) - # if len(table_names) == 0: - # with engine.begin() as conn: - # """Create the database schema.""" - # model.db.create_all() + @app.cli.command("create-schema") + def create_schema(): + engine = model.db.session.get_bind() + metadata = MetaData() + metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + if len(table_names) == 0: + with engine.begin() as conn: + """Create the database schema.""" + model.db.create_all() @app.before_request def on_before_request(): # pylint: disable = inconsistent-return-statements @@ -253,7 +251,6 @@ def destroy_schema(): metadata = MetaData() metadata.reflect(bind=engine) table_names = [table.name for table in metadata.tables.values()] - # print(table_names) if len(table_names) == 0: with engine.begin(): model.db.create_all() diff --git a/model/study.py b/model/study.py index 09e937b3..e2aa7e79 100644 --- a/model/study.py +++ b/model/study.py @@ -27,9 +27,6 @@ def __init__(self): self.study_ipdsharing = model.StudyIpdsharing(self) self.study_description = model.StudyDescription(self) self.study_identification.append(model.StudyIdentification(self, False)) - # NOTE: this has not been tested yet - self.study_redcap = model.StudyRedcap(self) - self.study_other = model.StudyOther(self) # self.study_contributors = model.StudyContributor(self) diff --git a/model/study_metadata/study_redcap.py b/model/study_metadata/study_redcap.py deleted file mode 100644 index cff9e2a8..00000000 --- a/model/study_metadata/study_redcap.py +++ /dev/null @@ -1,75 +0,0 @@ -from model import Study - -from ..db import db - - -class StudyRedcap(db.Model): # type: ignore - """A study is a collection of datasets and participants""" - - def __init__(self, study): - self.study = study - self.redcap_api_token = None - self.redcap_api_url = None - self.redcap_project_id = None - 
self.redcap_report_id_survey_completions = None - self.redcap_report_id_repeat_surveys = None - self.redcap_report_id_participant_values = None - self.redcap_report_id_participants = None - - __tablename__ = "study_redcap" - - redcap_api_token = db.Column(db.String, nullable=True) - redcap_api_url = db.Column(db.String, nullable=True) - redcap_project_id = db.Column(db.String, nullable=True) - redcap_report_id_survey_completions = db.Column(db.String, nullable=True) - redcap_report_id_repeat_surveys = db.Column(db.String, nullable=True) - redcap_report_id_participant_values = db.Column(db.String, nullable=True) - redcap_report_id_participants = db.Column(db.String, nullable=True) - - study_id = db.Column( - db.CHAR(36), - db.ForeignKey("study.id", ondelete="CASCADE"), - primary_key=True, - nullable=False, - ) - study = db.relationship("Study", back_populates="study_redcap") - - def to_dict(self): - """Converts the study to a dictionary""" - return { - "redcap_api_token": self.redcap_api_token, - "redcap_api_url": self.redcap_api_url, - "redcap_project_id": self.redcap_project_id, - "redcap_report_id_survey_completions": self.redcap_report_id_survey_completions, - "redcap_report_id_repeat_surveys": self.redcap_report_id_repeat_surveys, - "redcap_report_id_participant_values": self.redcap_report_id_participant_values, - "redcap_report_id_participants": self.redcap_report_id_participants, - } - - @staticmethod - def from_data(study: Study, data: dict): - """Creates a new study from a dictionary""" - study_redcap = StudyRedcap(study) - study_redcap.update(data) - - return study_redcap - - def update(self, data: dict): - """Updates the study from a dictionary""" - self.redcap_api_token = data["redcap_api_token"] - self.redcap_api_url = data["redcap_api_url"] - self.redcap_project_id = data["redcap_project_id"] - self.redcap_report_id_survey_completions = data[ - "redcap_report_id_survey_completions" - ] - self.redcap_report_id_repeat_surveys = 
data["redcap_report_id_repeat_surveys"] - self.redcap_report_id_participant_values = data[ - "redcap_report_id_participant_values" - ] - self.redcap_report_id_participants = data["redcap_report_id_participants"] - self.study.touch() - - def validate(self): - """Validates the study""" - violations: list = [] - return violations diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py index 203b70aa..ec8cfca5 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap_project_api.py @@ -20,6 +20,7 @@ def __init__(self, study): project_title = db.Column(db.String, nullable=False) project_api_url = db.Column(db.String, nullable=False) project_api_key = db.Column(db.String, nullable=False) + project_api_active = db.Column(db.Boolean, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) @@ -43,6 +44,7 @@ def to_dict(self): "project_id": self.project_id, "project_api_url": self.project_api_url, "project_api_key": self.project_api_key, + "project_api_active": self.project_api_active, } @staticmethod @@ -58,6 +60,7 @@ def update(self, data: dict): self.project_id = data["project_id"] self.project_api_url = data["project_api_url"] self.project_api_key = data["project_api_key"] + self.project_api_active = data["project_api_active"] self.updated_on = datetime.now(timezone.utc).timestamp() def validate(self): diff --git a/poetry.lock b/poetry.lock index 84bc21a4..0921086c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "aniso8601" version = "9.0.1" description = "A library for parsing ISO 8601 strings." 
+category = "main" optional = false python-versions = "*" files = [ @@ -18,6 +19,7 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] name = "anyio" version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -39,6 +41,7 @@ trio = ["trio (>=0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" +category = "dev" optional = false python-versions = "*" files = [ @@ -48,14 +51,9 @@ files = [ [[package]] name = "argon2-cffi" -<<<<<<< HEAD version = "23.1.0" description = "Argon2 for Python" category = "dev" -======= -version = "21.3.0" -description = "The secure Argon2 password hashing algorithm." ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 optional = false python-versions = ">=3.7" files = [ @@ -76,6 +74,7 @@ typing = ["mypy"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -113,6 +112,7 @@ tests = ["pytest"] name = "arrow" version = "1.3.0" description = "Better dates & times for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -132,6 +132,7 @@ test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "py name = "art" version = "6.1" description = "ASCII Art Library For Python" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -146,6 +147,7 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
+category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -163,25 +165,28 @@ wrapt = [ [[package]] name = "asttokens" -version = "2.4.0" +version = "2.4.1" description = "Annotate AST trees with source code positions" +category = "dev" optional = false python-versions = "*" files = [ - {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"}, - {file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"}, + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] [package.dependencies] six = ">=1.12.0" [package.extras] -test = ["astroid", "pytest"] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -208,6 +213,7 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -226,6 +232,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.13.1" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -239,21 +246,11 @@ setuptools = {version = "*", markers = "python_version >= \"3.12\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -optional = false -python-versions = "*" -files = [ - {file = 
"backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] - [[package]] name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -288,6 +285,7 @@ typecheck = ["mypy"] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" +category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -306,6 +304,7 @@ lxml = ["lxml"] name = "black" version = "23.10.1" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -348,6 +347,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -364,13 +364,14 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "blinker" -version = "1.6.3" +version = "1.7.0" description = "Fast, simple object-to-object and broadcast signaling" +category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "blinker-1.6.3-py3-none-any.whl", hash = "sha256:296320d6c28b006eb5e32d4712202dbcdcbf5dc482da298c2f44881c43884aaa"}, - {file = "blinker-1.6.3.tar.gz", hash = "sha256:152090d27c1c5c722ee7e48504b02d76502811ce02e1523553b4cf8c8b3d3a8d"}, + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, ] [[package]] @@ -389,6 +390,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -400,6 +402,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -462,107 +465,109 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.1" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.1.tar.gz", hash = "sha256:d9137a876020661972ca6eec0766d81aef8a5627df628b664b234b73396e727e"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8aee051c89e13565c6bd366813c386939f8e928af93c29fda4af86d25b73d8f8"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:352a88c3df0d1fa886562384b86f9a9e27563d4704ee0e9d56ec6fcd270ea690"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:223b4d54561c01048f657fa6ce41461d5ad8ff128b9678cfe8b2ecd951e3f8a2"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f861d94c2a450b974b86093c6c027888627b8082f1299dfd5a4bae8e2292821"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1171ef1fc5ab4693c5d151ae0fdad7f7349920eabbaca6271f95969fa0756c2d"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28f512b9a33235545fbbdac6a330a510b63be278a50071a336afc1b78781b147"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0e842112fe3f1a4ffcf64b06dc4c61a88441c2f02f373367f7b4c1aa9be2ad5"}, - {file = 
"charset_normalizer-3.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9bc2ce123637a60ebe819f9fccc614da1bcc05798bbbaf2dd4ec91f3e08846"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f194cce575e59ffe442c10a360182a986535fd90b57f7debfaa5c845c409ecc3"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9a74041ba0bfa9bc9b9bb2cd3238a6ab3b7618e759b41bd15b5f6ad958d17605"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b578cbe580e3b41ad17b1c428f382c814b32a6ce90f2d8e39e2e635d49e498d1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6db3cfb9b4fcecb4390db154e75b49578c87a3b9979b40cdf90d7e4b945656e1"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:debb633f3f7856f95ad957d9b9c781f8e2c6303ef21724ec94bea2ce2fcbd056"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win32.whl", hash = "sha256:87071618d3d8ec8b186d53cb6e66955ef2a0e4fa63ccd3709c0c90ac5a43520f"}, - {file = "charset_normalizer-3.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:e372d7dfd154009142631de2d316adad3cc1c36c32a38b16a4751ba78da2a397"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae4070f741f8d809075ef697877fd350ecf0b7c5837ed68738607ee0a2c572cf"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58e875eb7016fd014c0eea46c6fa92b87b62c0cb31b9feae25cbbe62c919f54d"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbd95e300367aa0827496fe75a1766d198d34385a58f97683fe6e07f89ca3e3c"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de0b4caa1c8a21394e8ce971997614a17648f94e1cd0640fbd6b4d14cab13a72"}, - {file = 
"charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:985c7965f62f6f32bf432e2681173db41336a9c2611693247069288bcb0c7f8b"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a15c1fe6d26e83fd2e5972425a772cca158eae58b05d4a25a4e474c221053e2d"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae55d592b02c4349525b6ed8f74c692509e5adffa842e582c0f861751701a673"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be4d9c2770044a59715eb57c1144dedea7c5d5ae80c68fb9959515037cde2008"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:851cf693fb3aaef71031237cd68699dded198657ec1e76a76eb8be58c03a5d1f"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:31bbaba7218904d2eabecf4feec0d07469284e952a27400f23b6628439439fa7"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:871d045d6ccc181fd863a3cd66ee8e395523ebfbc57f85f91f035f50cee8e3d4"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:501adc5eb6cd5f40a6f77fbd90e5ab915c8fd6e8c614af2db5561e16c600d6f3"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f5fb672c396d826ca16a022ac04c9dce74e00a1c344f6ad1a0fdc1ba1f332213"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win32.whl", hash = "sha256:bb06098d019766ca16fc915ecaa455c1f1cd594204e7f840cd6258237b5079a8"}, - {file = "charset_normalizer-3.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:8af5a8917b8af42295e86b64903156b4f110a30dca5f3b5aedea123fbd638bff"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae8e5142dcc7a49168f4055255dbcced01dc1714a90a21f87448dc8d90617d1"}, - {file = 
"charset_normalizer-3.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5b70bab78accbc672f50e878a5b73ca692f45f5b5e25c8066d748c09405e6a55"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ceca5876032362ae73b83347be8b5dbd2d1faf3358deb38c9c88776779b2e2f"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34d95638ff3613849f473afc33f65c401a89f3b9528d0d213c7037c398a51296"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9edbe6a5bf8b56a4a84533ba2b2f489d0046e755c29616ef8830f9e7d9cf5728"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6a02a3c7950cafaadcd46a226ad9e12fc9744652cc69f9e5534f98b47f3bbcf"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10b8dd31e10f32410751b3430996f9807fc4d1587ca69772e2aa940a82ab571a"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edc0202099ea1d82844316604e17d2b175044f9bcb6b398aab781eba957224bd"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b891a2f68e09c5ef989007fac11476ed33c5c9994449a4e2c3386529d703dc8b"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:71ef3b9be10070360f289aea4838c784f8b851be3ba58cf796262b57775c2f14"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:55602981b2dbf8184c098bc10287e8c245e351cd4fdcad050bd7199d5a8bf514"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:46fb9970aa5eeca547d7aa0de5d4b124a288b42eaefac677bde805013c95725c"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:520b7a142d2524f999447b3a0cf95115df81c4f33003c51a6ab637cbda9d0bf4"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win32.whl", hash = "sha256:8ec8ef42c6cd5856a7613dcd1eaf21e5573b2185263d87d27c8edcae33b62a61"}, - {file = "charset_normalizer-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:baec8148d6b8bd5cee1ae138ba658c71f5b03e0d69d5907703e3e1df96db5e41"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63a6f59e2d01310f754c270e4a257426fe5a591dc487f1983b3bbe793cf6bac6"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d6bfc32a68bc0933819cfdfe45f9abc3cae3877e1d90aac7259d57e6e0f85b1"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f3100d86dcd03c03f7e9c3fdb23d92e32abbca07e7c13ebd7ddfbcb06f5991f"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39b70a6f88eebe239fa775190796d55a33cfb6d36b9ffdd37843f7c4c1b5dc67"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e12f8ee80aa35e746230a2af83e81bd6b52daa92a8afaef4fea4a2ce9b9f4fa"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b6cefa579e1237ce198619b76eaa148b71894fb0d6bcf9024460f9bf30fd228"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:61f1e3fb621f5420523abb71f5771a204b33c21d31e7d9d86881b2cffe92c47c"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4f6e2a839f83a6a76854d12dbebde50e4b1afa63e27761549d006fa53e9aa80e"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:1ec937546cad86d0dce5396748bf392bb7b62a9eeb8c66efac60e947697f0e58"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:82ca51ff0fc5b641a2d4e1cc8c5ff108699b7a56d7f3ad6f6da9dbb6f0145b48"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:633968254f8d421e70f91c6ebe71ed0ab140220469cf87a9857e21c16687c034"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win32.whl", hash = "sha256:c0c72d34e7de5604df0fde3644cc079feee5e55464967d10b24b1de268deceb9"}, - {file = "charset_normalizer-3.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:63accd11149c0f9a99e3bc095bbdb5a464862d77a7e309ad5938fbc8721235ae"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5a3580a4fdc4ac05f9e53c57f965e3594b2f99796231380adb2baaab96e22761"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2465aa50c9299d615d757c1c888bc6fef384b7c4aec81c05a0172b4400f98557"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb7cd68814308aade9d0c93c5bd2ade9f9441666f8ba5aa9c2d4b389cb5e2a45"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e43805ccafa0a91831f9cd5443aa34528c0c3f2cc48c4cb3d9a7721053874b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:854cc74367180beb327ab9d00f964f6d91da06450b0855cbbb09187bcdb02de5"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c15070ebf11b8b7fd1bfff7217e9324963c82dbdf6182ff7050519e350e7ad9f"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4c99f98fc3a1835af8179dcc9013f93594d0670e2fa80c83aa36346ee763d2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fb765362688821404ad6cf86772fc54993ec11577cd5a92ac44b4c2ba52155b"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:dced27917823df984fe0c80a5c4ad75cf58df0fbfae890bc08004cd3888922a2"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a66bcdf19c1a523e41b8e9d53d0cedbfbac2e93c649a2e9502cb26c014d0980c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ecd26be9f112c4f96718290c10f4caea6cc798459a3a76636b817a0ed7874e42"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f70fd716855cd3b855316b226a1ac8bdb3caf4f7ea96edcccc6f484217c9597"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:17a866d61259c7de1bdadef418a37755050ddb4b922df8b356503234fff7932c"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win32.whl", hash = "sha256:548eefad783ed787b38cb6f9a574bd8664468cc76d1538215d510a3cd41406cb"}, - {file = "charset_normalizer-3.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:45f053a0ece92c734d874861ffe6e3cc92150e32136dd59ab1fb070575189c97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bc791ec3fd0c4309a753f95bb6c749ef0d8ea3aea91f07ee1cf06b7b02118f2f"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8c61fb505c7dad1d251c284e712d4e0372cef3b067f7ddf82a7fa82e1e9a93"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c092be3885a1b7899cd85ce24acedc1034199d6fca1483fa2c3a35c86e43041"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2000c54c395d9e5e44c99dc7c20a64dc371f777faf8bae4919ad3e99ce5253e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cb50a0335382aac15c31b61d8531bc9bb657cfd848b1d7158009472189f3d62"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c30187840d36d0ba2893bc3271a36a517a717f9fd383a98e2697ee890a37c273"}, - {file = 
"charset_normalizer-3.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe81b35c33772e56f4b6cf62cf4aedc1762ef7162a31e6ac7fe5e40d0149eb67"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0bf89afcbcf4d1bb2652f6580e5e55a840fdf87384f6063c4a4f0c95e378656"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:06cf46bdff72f58645434d467bf5228080801298fbba19fe268a01b4534467f5"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3c66df3f41abee950d6638adc7eac4730a306b022570f71dd0bd6ba53503ab57"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd805513198304026bd379d1d516afbf6c3c13f4382134a2c526b8b854da1c2e"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9505dc359edb6a330efcd2be825fdb73ee3e628d9010597aa1aee5aa63442e97"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31445f38053476a0c4e6d12b047b08ced81e2c7c712e5a1ad97bc913256f91b2"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win32.whl", hash = "sha256:bd28b31730f0e982ace8663d108e01199098432a30a4c410d06fe08fdb9e93f4"}, - {file = "charset_normalizer-3.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:555fe186da0068d3354cdf4bbcbc609b0ecae4d04c921cc13e209eece7720727"}, - {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, 
+ {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -577,6 +582,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -588,6 +594,7 @@ files = [ name = "comm" version = "0.1.4" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -607,6 +614,7 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.3.2" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -674,6 +682,7 @@ toml = ["tomli"] name = "coveragespace" version = "6.0.2" description = "A place to track your code coverage metrics." 
+category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -688,55 +697,11 @@ docopt = ">=0.6" minilog = ">=2.0" requests = ">=2.28,<3.0" -[[package]] -name = "cryptography" -version = "41.0.5" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = 
["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - [[package]] name = "debugpy" version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -764,6 +729,7 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -775,6 +741,7 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -786,6 +753,7 @@ files = [ name = "dicttoxml" version = "1.7.16" description = "Converts a Python dictionary or other native data type into a valid XML string." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -797,6 +765,7 @@ files = [ name = "dill" version = "0.3.7" description = "serialize all of Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -811,6 +780,7 @@ graph = ["objgraph (>=1.7.2)"] name = "dnspython" version = "2.4.2" description = "DNS toolkit" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -830,6 +800,7 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" +category = "dev" optional = false python-versions = "*" files = [ @@ -840,6 +811,7 @@ files = [ name = "email-validator" version = "2.1.0.post1" description = "A robust email address syntax and deliverability validation library." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -855,6 +827,7 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -867,13 +840,14 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.0" +version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" +category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "executing-2.0.0-py2.py3-none-any.whl", hash = "sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657"}, - {file = "executing-2.0.0.tar.gz", hash = "sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"}, + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, ] [package.extras] @@ -883,6 +857,7 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth name = "faker" version = "18.13.0" description = "Faker is a Python package that generates fake data for you." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -897,6 +872,7 @@ python-dateutil = ">=2.4" name = "fastjsonschema" version = "2.18.1" description = "Fastest Python implementation of JSON schema" +category = "dev" optional = false python-versions = "*" files = [ @@ -911,6 +887,7 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -927,6 +904,7 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.3" description = "A simple framework for building complex web applications." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -949,6 +927,7 @@ dotenv = ["python-dotenv"] name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." +category = "main" optional = false python-versions = "*" files = [ @@ -980,6 +959,7 @@ Flask = "*" name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" +category = "main" optional = false python-versions = "*" files = [ @@ -994,6 +974,7 @@ Flask = ">=0.9" name = "flask-restx" version = "1.2.0" description = "Fully featured framework for fast, easy and documented API development with Flask" +category = "main" optional = false python-versions = "*" files = [ @@ -1018,6 +999,7 @@ test = ["Faker (==2.0.0)", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pyt name = "flask-sqlalchemy" version = "3.1.1" description = "Add SQLAlchemy support to your Flask application." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1033,6 +1015,7 @@ sqlalchemy = ">=2.0.16" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -1044,6 +1027,7 @@ files = [ name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1058,6 +1042,7 @@ python-dateutil = ">=2.7" name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." +category = "dev" optional = false python-versions = "*" files = [ @@ -1075,10 +1060,10 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "greenlet" version = "3.0.1" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, @@ -1136,68 +1121,6 @@ files = [ {file = "greenlet-3.0.1-cp39-cp39-win32.whl", hash = "sha256:cf868e08690cb89360eebc73ba4be7fb461cfbc6168dd88e2fbbe6f31812cd57"}, {file = "greenlet-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:ac4a39d1abae48184d420aa8e5e63efd1b75c8444dd95daa3e03f6c6310e9619"}, {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, -======= - {file = 
"greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = "greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = 
"greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = 
"greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file 
= "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 ] [package.extras] @@ -1208,6 +1131,7 @@ test = ["objgraph", "psutil"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1219,10 +1143,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -<<<<<<< HEAD category = "dev" -======= ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 optional = false python-versions = ">=3.8" files = [ @@ -1242,6 +1163,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.1.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1257,6 +1179,7 @@ testing = ["pytest (>=6)", "pytest-black 
(>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1268,6 +1191,7 @@ files = [ name = "ipykernel" version = "6.26.0" description = "IPython Kernel for Jupyter" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1281,7 +1205,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1299,47 +1223,47 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.16.1" +version = "8.17.2" description = "IPython: Productive Interactive Computing" +category = "dev" optional = false python-versions = ">=3.9" files = [ - {file = "ipython-8.16.1-py3-none-any.whl", hash = "sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e"}, - {file = "ipython-8.16.1.tar.gz", hash = "sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"}, + {file = "ipython-8.17.2-py3-none-any.whl", hash = "sha256:1e4d1d666a023e3c93585ba0d8e962867f7a111af322efff6b9c58062b3e5444"}, + {file = "ipython-8.17.2.tar.gz", hash = "sha256:126bb57e1895594bb0d91ea3090bbd39384f6fe87c3d57fd558d0670f50339bb"}, ] [package.dependencies] appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "curio", "docrepr", 
"exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] [[package]] name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" +category = "dev" optional = false python-versions = "*" 
files = [ @@ -1351,6 +1275,7 @@ files = [ name = "ipywidgets" version = "8.1.1" description = "Jupyter interactive widgets" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1372,6 +1297,7 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1386,6 +1312,7 @@ arrow = ">=0.15.0" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1403,6 +1330,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1414,6 +1342,7 @@ files = [ name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1433,6 +1362,7 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1450,6 +1380,7 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1464,6 +1395,7 @@ dev = ["hypothesis"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1473,22 +1405,14 @@ files = [ [[package]] name = "jsonschema" -<<<<<<< HEAD version = "4.17.3" -======= -version = "4.19.1" ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.7" files = [ -<<<<<<< HEAD {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, -======= - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 ] [package.dependencies] @@ -1508,27 +1432,10 @@ format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validat format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] -<<<<<<< HEAD -======= -name = "jsonschema-specifications" -version = "2023.7.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = 
"sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, -] - -[package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -referencing = ">=0.28.0" - -[[package]] ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 name = "jupyter" version = "1.0.0" description = "Jupyter metapackage. Install all the Jupyter components in one go." +category = "dev" optional = false python-versions = "*" files = [ @@ -1549,6 +1456,7 @@ qtconsole = "*" name = "jupyter-client" version = "8.5.0" description = "Jupyter protocol implementation and client libraries" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1557,12 +1465,7 @@ files = [ ] [package.dependencies] -<<<<<<< HEAD jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -======= -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1576,6 +1479,7 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-console" version = "6.6.3" description = "Jupyter terminal console" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1587,7 +1491,7 @@ files = [ ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" prompt-toolkit = ">=3.0.30" pygments = "*" pyzmq = ">=17" @@ -1598,13 +1502,14 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.4.0" +version = "5.5.0" description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.4.0-py3-none-any.whl", hash = "sha256:66e252f675ac04dcf2feb6ed4afb3cd7f68cf92f483607522dc251f32d471571"}, - {file = "jupyter_core-5.4.0.tar.gz", hash = "sha256:e4b98344bb94ee2e3e6c4519a97d001656009f9cb2b7f2baf15b3c205770011d"}, + {file = "jupyter_core-5.5.0-py3-none-any.whl", hash = "sha256:e11e02cd8ae0a9de5c6c44abf5727df9f2581055afe00b22183f621ba3585805"}, + {file = "jupyter_core-5.5.0.tar.gz", hash = "sha256:880b86053bf298a8724994f95e99b99130659022a4f7f45f563084b6223861d3"}, ] [package.dependencies] @@ -1613,13 +1518,14 @@ pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_ traitlets = ">=5.3" [package.extras] -docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" version = "0.6.3" description = "Jupyter Event System library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1644,6 +1550,7 @@ test = ["click", "coverage", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>= name = "jupyter-lsp" version = "2.2.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1658,6 +1565,7 @@ jupyter-server = ">=1.1.2" name = "jupyter-server" version = "2.9.1" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1670,7 +1578,7 @@ anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" jupyter-events = ">=0.6.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1694,6 +1602,7 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-terminals" version = "0.4.4" description = "A Jupyter Server Extension Providing Terminals." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1713,6 +1622,7 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", name = "jupyterlab" version = "4.0.7" description = "JupyterLab computational environment" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1744,6 +1654,7 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1755,6 +1666,7 @@ files = [ name = "jupyterlab-server" version = "2.24.0" description = "A set of server components for JupyterLab and JupyterLab like applications." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1780,6 +1692,7 @@ test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-valida name = "jupyterlab-widgets" version = "3.0.9" description = "Jupyter interactive widgets for JupyterLab" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1787,23 +1700,11 @@ files = [ {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, ] -[[package]] -name = "jwt" -version = "1.3.1" -description = "JSON Web Token library for Python 3." 
-optional = false -python-versions = ">= 3.6" -files = [ - {file = "jwt-1.3.1-py3-none-any.whl", hash = "sha256:61c9170f92e736b530655e75374681d4fcca9cfa8763ab42be57353b2b203494"}, -] - -[package.dependencies] -cryptography = ">=3.1,<3.4.0 || >3.4.0" - [[package]] name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1849,6 +1750,7 @@ files = [ name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1863,6 +1765,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1922,6 +1825,7 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1936,6 +1840,7 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1947,6 +1852,7 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1958,6 +1864,7 @@ files = [ name = "minilog" version = "2.2" description = "Minimalistic wrapper for Python logging." +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1969,6 +1876,7 @@ files = [ name = "mistune" version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1980,6 +1888,7 @@ files = [ name = "mkdocs" version = "1.3.1" description = "Project documentation with Markdown." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2006,6 +1915,7 @@ i18n = ["babel (>=2.9.0)"] name = "mypy" version = "1.6.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2052,6 +1962,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2063,6 +1974,7 @@ files = [ name = "nbclient" version = "0.8.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -2072,7 +1984,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -2083,13 +1995,14 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.9.2" +version = "7.10.0" description = "Converting Jupyter Notebooks" +category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.9.2-py3-none-any.whl", hash = "sha256:39fe4b8bdd1b0104fdd86fc8a43a9077ba64c720bda4c6132690d917a0a154ee"}, - {file = "nbconvert-7.9.2.tar.gz", hash = "sha256:e56cc7588acc4f93e2bb5a34ec69028e4941797b2bfaf6462f18a41d1cc258c9"}, + {file = "nbconvert-7.10.0-py3-none-any.whl", hash = "sha256:8cf1d95e569730f136feb85e4bba25bdcf3a63fefb122d854ddff6771c0ac933"}, + {file = "nbconvert-7.10.0.tar.gz", hash = "sha256:4bedff08848626be544de193b7594d98a048073f392178008ff4f171f5e21d26"}, ] [package.dependencies] @@ -2122,6 +2035,7 @@ webpdf = ["playwright"] name = "nbformat" version = "5.9.2" description = "The Jupyter Notebook format" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2143,6 
+2057,7 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.8" description = "Patch asyncio to allow nested event loops" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2154,6 +2069,7 @@ files = [ name = "notebook" version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2177,6 +2093,7 @@ test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4 name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2236,6 +2153,7 @@ files = [ name = "overrides" version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2247,6 +2165,7 @@ files = [ name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2256,44 +2175,44 @@ files = [ [[package]] name = "pandas" -version = "2.1.1" +version = "2.1.2" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58d997dbee0d4b64f3cb881a24f918b5f25dd64ddf31f467bb9b67ae4c63a1e4"}, - {file = "pandas-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02304e11582c5d090e5a52aec726f31fe3f42895d6bfc1f28738f9b64b6f0614"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa8f0966de2c22de408d0e322db2faed6f6e74265aa0856f3824813cf124363"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c1f84c144dee086fe4f04a472b5cd51e680f061adf75c1ae4fc3a9275560f8f4"}, - {file = "pandas-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ce97667d06d69396d72be074f0556698c7f662029322027c226fd7a26965cb"}, - {file = "pandas-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:4c3f32fd7c4dccd035f71734df39231ac1a6ff95e8bdab8d891167197b7018d2"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e2959720b70e106bb1d8b6eadd8ecd7c8e99ccdbe03ee03260877184bb2877d"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25e8474a8eb258e391e30c288eecec565bfed3e026f312b0cbd709a63906b6f8"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bd1685556f3374520466998929bade3076aeae77c3e67ada5ed2b90b4de7f0"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3657869c7902810f32bd072f0740487f9e030c1a3ab03e0af093db35a9d14e"}, - {file = "pandas-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:05674536bd477af36aa2effd4ec8f71b92234ce0cc174de34fd21e2ee99adbc2"}, - {file = "pandas-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:b407381258a667df49d58a1b637be33e514b07f9285feb27769cedb3ab3d0b3a"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c747793c4e9dcece7bb20156179529898abf505fe32cb40c4052107a3c620b49"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bcad1e6fb34b727b016775bea407311f7721db87e5b409e6542f4546a4951ea"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5ec7740f9ccb90aec64edd71434711f58ee0ea7f5ed4ac48be11cfa9abf7317"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29deb61de5a8a93bdd033df328441a79fcf8dd3c12d5ed0b41a395eef9cd76f0"}, - {file = "pandas-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:4f99bebf19b7e03cf80a4e770a3e65eee9dd4e2679039f542d7c1ace7b7b1daa"}, - {file = "pandas-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:84e7e910096416adec68075dc87b986ff202920fb8704e6d9c8c9897fe7332d6"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366da7b0e540d1b908886d4feb3d951f2f1e572e655c1160f5fde28ad4abb750"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e50e72b667415a816ac27dfcfe686dc5a0b02202e06196b943d54c4f9c7693e"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1ab6a25da197f03ebe6d8fa17273126120874386b4ac11c1d687df288542dd"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0dbfea0dd3901ad4ce2306575c54348d98499c95be01b8d885a2737fe4d7a98"}, - {file = "pandas-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0489b0e6aa3d907e909aef92975edae89b1ee1654db5eafb9be633b0124abe97"}, - {file = "pandas-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cdb0fab0400c2cb46dafcf1a0fe084c8bb2480a1fa8d81e19d15e12e6d4ded2"}, - {file = "pandas-2.1.1.tar.gz", hash = "sha256:fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b"}, + {file = "pandas-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:24057459f19db9ebb02984c6fdd164a970b31a95f38e4a49cf7615b36a1b532c"}, + {file = "pandas-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6cf8fcc8a63d333970b950a7331a30544cf59b1a97baf0a7409e09eafc1ac38"}, + {file = "pandas-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ae6ffbd9d614c20d028c7117ee911fc4e266b4dca2065d5c5909e401f8ff683"}, + {file = "pandas-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff794eeb7883c5aefb1ed572e7ff533ae779f6c6277849eab9e77986e352688"}, + {file = "pandas-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02954e285e8e2f4006b6f22be6f0df1f1c3c97adbb7ed211c6b483426f20d5c8"}, + {file 
= "pandas-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:5b40c9f494e1f27588c369b9e4a6ca19cd924b3a0e1ef9ef1a8e30a07a438f43"}, + {file = "pandas-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:08d287b68fd28906a94564f15118a7ca8c242e50ae7f8bd91130c362b2108a81"}, + {file = "pandas-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bbd98dcdcd32f408947afdb3f7434fade6edd408c3077bbce7bd840d654d92c6"}, + {file = "pandas-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e90c95abb3285d06f6e4feedafc134306a8eced93cb78e08cf50e224d5ce22e2"}, + {file = "pandas-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52867d69a54e71666cd184b04e839cff7dfc8ed0cd6b936995117fdae8790b69"}, + {file = "pandas-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d0382645ede2fde352da2a885aac28ec37d38587864c0689b4b2361d17b1d4c"}, + {file = "pandas-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:65177d1c519b55e5b7f094c660ed357bb7d86e799686bb71653b8a4803d8ff0d"}, + {file = "pandas-2.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5aa6b86802e8cf7716bf4b4b5a3c99b12d34e9c6a9d06dad254447a620437931"}, + {file = "pandas-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d594e2ce51b8e0b4074e6644758865dc2bb13fd654450c1eae51201260a539f1"}, + {file = "pandas-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3223f997b6d2ebf9c010260cf3d889848a93f5d22bb4d14cd32638b3d8bba7ad"}, + {file = "pandas-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4944dc004ca6cc701dfa19afb8bdb26ad36b9bed5bcec617d2a11e9cae6902"}, + {file = "pandas-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3f76280ce8ec216dde336e55b2b82e883401cf466da0fe3be317c03fb8ee7c7d"}, + {file = "pandas-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:7ad20d24acf3a0042512b7e8d8fdc2e827126ed519d6bd1ed8e6c14ec8a2c813"}, + {file = "pandas-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:021f09c15e1381e202d95d4a21ece8e7f2bf1388b6d7e9cae09dfe27bd2043d1"}, + {file = "pandas-2.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7f12b2de0060b0b858cfec0016e7d980ae5bae455a1746bfcc70929100ee633"}, + {file = "pandas-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c166b9bb27c1715bed94495d9598a7f02950b4749dba9349c1dd2cbf10729d"}, + {file = "pandas-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25c9976c17311388fcd953cb3d0697999b2205333f4e11e669d90ff8d830d429"}, + {file = "pandas-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:851b5afbb0d62f6129ae891b533aa508cc357d5892c240c91933d945fff15731"}, + {file = "pandas-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:e78507adcc730533619de07bfdd1c62b2918a68cd4419ea386e28abf7f6a1e5c"}, + {file = "pandas-2.1.2.tar.gz", hash = "sha256:52897edc2774d2779fbeb6880d2cfb305daa0b1a29c16b91f531a18918a6e0f3"}, ] [package.dependencies] numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -2327,6 +2246,7 @@ xml = ["lxml (>=4.8.0)"] name = "pandocfilters" version = "1.5.0" description = "Utilities for writing pandoc filters in python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2338,6 +2258,7 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2353,6 +2274,7 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pastel" version = "0.2.1" description = "Bring colors to your 
terminal." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2364,6 +2286,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2375,6 +2298,7 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." +category = "dev" optional = false python-versions = "*" files = [ @@ -2386,34 +2310,10 @@ files = [ ptyprocess = ">=0.5" [[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - -[[package]] -<<<<<<< HEAD -======= -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - -[[package]] ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2429,6 +2329,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2444,6 +2345,7 @@ testing = ["pytest", "pytest-benchmark"] name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2460,13 +2362,14 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "prometheus-client" -version = "0.17.1" +version = "0.18.0" description = "Python client for the Prometheus monitoring system." +category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, + {file = "prometheus_client-0.18.0-py3-none-any.whl", hash = "sha256:8de3ae2755f890826f4b6479e5571d4f74ac17a81345fe69a6778fdb92579184"}, + {file = "prometheus_client-0.18.0.tar.gz", hash = "sha256:35f7a8c22139e2bb7ca5a698e92d38145bc8dc74c1c0bf56f25cca886a764e17"}, ] [package.extras] @@ -2476,6 +2379,7 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2490,6 +2394,7 @@ wcwidth = "*" name = "psutil" version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2518,6 +2423,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2538,6 +2444,7 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -2549,6 +2456,7 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "dev" optional = false python-versions = "*" files = [ @@ -2561,14 +2469,14 @@ tests = ["pytest"] [[package]] name = "pycap" -version = "2.4.0" +version = "2.5.0" description = "PyCap: Python interface to REDCap" category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "pycap-2.4.0-py3-none-any.whl", hash = "sha256:056f432204f20c99b1479dff41ac798da3e2a995f421dd081de330729585a483"}, - {file = "pycap-2.4.0.tar.gz", hash = "sha256:15f84e8b61cd39efbd30449e73738850629581165a2144b15d676ceb7936d49e"}, + {file = "pycap-2.5.0-py3-none-any.whl", hash = "sha256:f483e0c8405f4dc904932d79d4d6076bd508a29e0c1e4636443316844109e9b5"}, + {file = "pycap-2.5.0.tar.gz", hash = "sha256:3c61e5cab844e4dec1be6318eac2fe639161221cad62a0fa5526c573e07cc406"}, ] [package.dependencies] @@ -2582,6 +2490,7 @@ data-science = ["pandas (>=1.3.4,<2.0.0)"] name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2593,6 +2502,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2604,6 +2514,7 @@ files = [ name = "pydocstyle" version = 
"6.3.0" description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2621,6 +2532,7 @@ toml = ["tomli (>=1.2.3)"] name = "pyfairdatatools" version = "0.1.3" description = "Tools for AI-READI" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2643,6 +2555,7 @@ validators = ">=0.20.0,<0.21.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2654,6 +2567,7 @@ files = [ name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2668,6 +2582,7 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2685,6 +2600,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2713,6 +2629,7 @@ testutils = ["gitpython (>3)"] name = "pymdown-extensions" version = "10.3.1" description = "Extension pack for Python Markdown." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2773,6 +2690,7 @@ files = [ name = "pytest" version = "7.4.3" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2795,6 +2713,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2813,6 +2732,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-describe" version = "2.1.0" description = "Describe-style plugin for pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2827,6 +2747,7 @@ pytest = ">=4.6,<8" name = "pytest-expecter" version = "3.0" description = "Better testing with expecter and pytest." +category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2838,6 +2759,7 @@ files = [ name = "pytest-random" version = "0.02" description = "py.test plugin to randomize tests" +category = "dev" optional = false python-versions = "*" files = [ @@ -2851,6 +2773,7 @@ pytest = ">=2.2.3" name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2865,6 +2788,7 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2879,6 +2803,7 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2890,6 +2815,7 @@ files = [ name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -2901,6 +2827,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "dev" optional = false python-versions = "*" files = [ @@ -2924,6 +2851,7 @@ files = [ name = "pywinpty" version = "2.0.12" description = "Pseudo terminal support for Windows from Python." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2939,6 +2867,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2998,6 +2927,7 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3012,6 +2942,7 @@ pyyaml = "*" name = "pyzmq" version = "25.1.1" description = "Python bindings for 0MQ" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3117,6 +3048,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qtconsole" version = "5.4.4" description = "Jupyter Qt console" +category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -3143,6 +3075,7 @@ test = ["flaky", "pytest", "pytest-qt"] name = "qtpy" version = "2.4.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3157,16 +3090,10 @@ packaging = "*" test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] -<<<<<<< HEAD name = "redis" version = "5.0.1" description = "Python client for Redis database and key-value store" category = "main" -======= -name = "referencing" -version = "0.30.0" -description = "JSON Referencing + Python" ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 optional = false python-versions = ">=3.7" files = [ @@ -3185,6 +3112,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3206,6 +3134,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3220,6 +3149,7 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3228,16 +3158,10 @@ files = [ ] [[package]] -<<<<<<< HEAD name = "semantic-version" version = "2.10.0" description = "A library implementing the 'SemVer' scheme." category = "main" -======= -name = "rpds-py" -version = "0.9.2" -description = "Python bindings to Rust's persistent data structures (rpds)" ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 optional = false python-versions = ">=2.7" files = [ @@ -3253,6 +3177,7 @@ doc = ["Sphinx", "sphinx-rtd-theme"] name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3286,6 +3211,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3297,6 +3223,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3308,6 +3235,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -3319,6 +3247,7 @@ files = [ name = "soupsieve" version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3330,6 +3259,7 @@ files = [ name = "sqlalchemy" version = "2.0.22" description = "Database Abstraction Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3416,6 +3346,7 @@ sqlcipher = ["sqlcipher3-binary"] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" optional = false python-versions = "*" files = [ @@ -3435,6 +3366,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "terminado" version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3455,6 +3387,7 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3473,6 +3406,7 @@ test = ["flake8", "isort", "pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3484,6 +3418,7 @@ files = [ name = "tomlkit" version = "0.12.1" description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3495,6 +3430,7 @@ files = [ name = "tornado" version = "6.3.3" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -3513,13 +3449,14 @@ files = [ [[package]] name = "traitlets" -version = "5.12.0" +version = "5.13.0" description = "Traitlets Python configuration system" +category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.12.0-py3-none-any.whl", hash = "sha256:81539f07f7aebcde2e4b5ab76727f53eabf18ad155c6ed7979a681411602fa47"}, - {file = "traitlets-5.12.0.tar.gz", hash = "sha256:833273bf645d8ce31dcb613c56999e2e055b1ffe6d09168a164bcd91c36d5d35"}, + {file = "traitlets-5.13.0-py3-none-any.whl", hash = "sha256:baf991e61542da48fe8aef8b779a9ea0aa38d8a54166ee250d5af5ecf4486619"}, + {file = "traitlets-5.13.0.tar.gz", hash = "sha256:9b232b9430c8f57288c1024b34a8f0251ddcc47268927367a0dd3eeaca40deb5"}, ] [package.extras] @@ -3527,16 +3464,10 @@ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=3.0.3)", "mypy (>=1.6.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] [[package]] -<<<<<<< HEAD name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" category = "dev" -======= -name = "types-requests" -version = "2.31.0.2" -description = "Typing stubs for requests" ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 optional = false python-versions = "*" files = [ @@ -3563,6 +3494,7 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" +category = "main" optional = false python-versions = "*" files = [ @@ -3572,14 +3504,9 @@ files = [ [[package]] name = "typing-extensions" -<<<<<<< HEAD version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" category = "main" -======= -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 optional = false python-versions = ">=3.8" files = [ @@ -3603,6 +3530,7 @@ files = [ name 
= "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3617,6 +3545,7 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3633,6 +3562,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." +category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3649,6 +3579,7 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3686,19 +3617,21 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.8" +version = "0.2.9" description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, - {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, + {file = "wcwidth-0.2.9-py2.py3-none-any.whl", hash = "sha256:9a929bd8380f6cd9571a968a9c8f4353ca58d7cd812a4822bba831f8d685b223"}, + {file = "wcwidth-0.2.9.tar.gz", hash = "sha256:a675d1a4a2d24ef67096a04b85b02deeecd8e226f57b5e3a72dbb9ed99d27da8"}, ] [[package]] name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3714,6 +3647,7 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" +category = "dev" optional = false python-versions = "*" files = [ @@ -3725,6 +3659,7 @@ files = [ name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3741,6 +3676,7 @@ test = ["websockets"] name = "werkzeug" version = "2.3.7" description = "The comprehensive WSGI web application library." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3758,6 +3694,7 @@ watchdog = ["watchdog (>=2.3)"] name = "widgetsnbextension" version = "4.0.9" description = "Jupyter interactive widgets for Jupyter Notebook" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3769,6 +3706,7 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
+category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3853,10 +3791,7 @@ files = [ name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -<<<<<<< HEAD category = "dev" -======= ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 optional = false python-versions = ">=3.8" files = [ @@ -3870,10 +3805,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -<<<<<<< HEAD python-versions = ">=3.10.12,<3.13" content-hash = "fb9ccdfdb1d36a84cc2af2f9540de70bb14027c6d3e6b628c2c123092543413c" -======= -python-versions = "^3.8.16" -content-hash = "266d48f06cb7dac297184d0390f8d4e3404d9f60a2df4a68a4ff81adddf35d5f" ->>>>>>> 9da3ac0e75ed75ed8f45bef1ee7d5167de0dd676 diff --git a/sql/init.sql b/sql/init.sql index 5ed11a48..2c54b874 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -938,6 +938,7 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( "project_title" VARCHAR NOT NULL, "project_api_url" VARCHAR NOT NULL, "project_api_key" CHAR(32) NOT NULL, + "project_api_active" BOOLEAN NOT NULL, "created_at" BIGINT NOT NULL, "updated_on" BIGINT NOT NULL, PRIMARY KEY ("study_id", "project_id"), @@ -946,8 +947,8 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( -- Dumping data for table public.study_redcap_project_api: 1 rows /*!40000 ALTER TABLE "study_redcap_project_api" DISABLE KEYS */; -INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "project_api_active", "created_at", "updated_on") 
VALUES + ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', 0, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_redcap_project_api" ENABLE KEYS */; -- Dumping structure for table public.study_redcap_project_dashboard diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index a8a8b5a2..92ab23a8 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -1053,6 +1053,7 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( "project_title" VARCHAR NOT NULL, "project_api_url" VARCHAR NOT NULL, "project_api_key" CHAR(32) NOT NULL, + "project_api_active" BOOLEAN NOT NULL, "created_at" BIGINT NOT NULL, "updated_on" BIGINT NOT NULL, PRIMARY KEY ("study_id", "project_id"), @@ -1061,8 +1062,8 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( -- Dumping data for table public.study_redcap_project_api: 1 rows /*!40000 ALTER TABLE "study_redcap_project_api" DISABLE KEYS */; -INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "project_api_active", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', 0, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_redcap_project_api" ENABLE KEYS */; -- Dumping structure for table public.study_redcap_project_dashboard diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index 60a36289..2940e14e 100644 --- a/sql/specific_tables.sql +++ 
b/sql/specific_tables.sql @@ -63,12 +63,12 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); -INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "updated_on", "created_at") VALUES - ('00000000-0000-0000-0000-000000000001', '11111', 'ai-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA1', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - ('00000000-0000-0000-0000-000000000002', '22222', 'dev-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA2', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - ('00000000-0000-0000-0000-000000000003', '33333', 'ops-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA3', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - ('00000000-0000-0000-0000-000000000004', '44444', 'data-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - ('00000000-0000-0000-0000-000000000005', '55555', 'more-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', '2023-08-13 16:23:48', '2023-08-13 16:23:49'); +INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "project_api_active", "updated_on", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', '11111', 'ai-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA1', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000002', '22222', 'dev-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA2', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '33333', 'ops-stuff', 
'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA3', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '44444', 'data-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000005', '55555', 'more-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'); INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES ('00000000-0000-0000-0000-000000000001', '11111', '10000000-0000-0000-0000-000000000000', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), From 8d24d1968ccd49f9d52dc7b13754735083d5ff63 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Wed, 1 Nov 2023 17:14:50 -0700 Subject: [PATCH 331/505] Merge branch 'staging' of https://github.com/AI-READI/api.fairhub.io into staging - cleanup --- config.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/config.py b/config.py index 522ddc3a..ac2425a3 100644 --- a/config.py +++ b/config.py @@ -5,16 +5,11 @@ # Check if `.env` file exists env_path = Path(".") / ".env" - LOCAL_ENV_FILE = env_path.exists() # Load environment variables from .env config = dotenv_values(".env") -<<<<<<< HEAD -from dotenv import load_dotenv -load_dotenv(".env") - FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") CACHE_DEFAULT_TIMEOUT = environ.get("CACHE_DEFAULT_TIMEOUT") @@ -23,7 +18,6 @@ CACHE_PORT = environ.get("CACHE_PORT") CACHE_DB = environ.get("CACHE_DB") CACHE_URL = environ.get("CACHE_URL") -======= def get_env(key): """Return environment variable from .env or native environment.""" @@ -35,4 +29,3 @@ def get_env(key): FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") 
FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") ->>>>>>> fb3fb5cace0ebc1c41c9914afad862ec4a889831 From 10ef3bd75f32381cb7185d85cf7021839438ad8f Mon Sep 17 00:00:00 2001 From: Greenstick Date: Thu, 2 Nov 2023 02:08:58 -0700 Subject: [PATCH 332/505] =?UTF-8?q?=F0=9F=9A=A8=20chore:=20fix=20redcap=20?= =?UTF-8?q?flake=20errors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap.py | 206 ++---------------------- app.py | 41 +---- config.py | 18 +-- model/study_redcap_project_api.py | 1 - model/study_redcap_project_dashboard.py | 3 +- 5 files changed, 28 insertions(+), 241 deletions(-) diff --git a/apis/redcap.py b/apis/redcap.py index 6dc232ed..8a899139 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -45,14 +45,12 @@ @api.route("/study//redcap/all") class RedcapProjectAPIs(Resource): - """Study Redcap Metadata""" - @api.doc("redcap_project_apis") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_api_model, as_list=True) def get(self, study_id: int): - """Get study redcap""" + """List all study REDCap project API links""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 @@ -75,6 +73,7 @@ class RedcapProjectAPI(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_api_model) def get(self, study_id: int): + """Get study REDCap project API link""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 @@ -88,7 +87,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_api_model) def put(self, study_id: int): - """Update study redcap""" + """Update study REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you 
can not modify", 403 @@ -137,7 +136,7 @@ def put(self, study_id: int): f"redcap project_api_key is required for redcap access: {data['project_api_key']}", 400, ) - if type(data["project_api_active"]) != bool: + if type(data["project_api_active"]) is not bool: return ( f"redcap project_api_active is required for redcap access: {data['project_api_active']}", 400, @@ -157,7 +156,7 @@ class AddRedcapProjectAPI(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_api_model) def post(self, study_id: int): - """Update study redcap""" + """Create new study REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 @@ -207,7 +206,7 @@ def post(self, study_id: int): f"redcap project_api_key is required for redcap access: {data['project_api_key']}", 400, ) - if type(data["project_api_active"]) != bool: + if type(data["project_api_active"]) is not bool: return ( f"redcap project_api_active is required for redcap access: {data['project_api_active']}", 400, @@ -225,7 +224,7 @@ class DeleteRedcapProjectAPI(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_api_model) def post(self, study_id: int): - """Delete study redcap metadata""" + """Delete study REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not delete study", 403 @@ -234,112 +233,7 @@ def post(self, study_id: int): project_id=data["project_id"] ).delete() model.db.session.commit() - return 204 - - -# @api.route("/study//redcap/") -# @api.expect(study_project_parser) -# class RedcapProjectAPI(Resource): -# """Study Redcap Metadata""" - -# @api.doc("redcap_project_api") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") - -# @api.marshal_with(redcap_project_api_model) -# def get(self, study_id: int, project_id: str): -# print(study_id, project_id) -# study = 
model.Study.query.get(study_id) -# if is_granted("redcap_access", study): -# return "Access denied, you can not modify", 403 -# data: Union[Any, dict] = request.json -# redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) -# print(redcap_project_api.to_dict()) -# return redcap_project_api.to_dict() - -# @api.doc("redcap_project_api") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_api_model) -# def delete(self, study_id: int, project_id: str): -# """Delete study redcap metadata""" -# study = model.Study.query.get(study_id) -# if is_granted("redcap_access", study): -# return "Access denied, you can not delete study", 403 -# data: Union[Any, dict] = request.json -# delete_redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) -# model.db.session.delete(delete_redcap_project_api) -# model.db.session.commit() -# return 204 - - -# @api.route("/study//redcap//edit") -# @api.expect(study_project_parser) -# class EditRedcapProjectAPI(Resource): -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_api_model) -# def put(self, study_id: int, project_id: str): -# """Update study redcap""" -# study = model.Study.query.get(study_id) -# if is_granted("redcap_access", study): -# return "Access denied, you can not modify", 403 -# # Schema validation -# data: Union[Any, dict] = request.json -# schema = { -# "type": "object", -# "additionalProperties": False, -# "required": [ -# "project_title", -# "project_id", -# "project_api_url", -# "project_api_key", -# "project_api_active", -# ], -# "properties": { -# "project_title": {"type": "string", "minLength": 1}, -# "project_id": {"type": "string", "minLength": 5}, -# "project_api_url": {"type": "string", "minLength": 1}, -# "project_api_key": {"type": "string", "minLength": 32}, -# "project_api_active": {"type": "boolean"}, -# }, -# } - -# try: -# validate(request.json, schema) -# 
except ValidationError as e: -# return e.message, 400 - -# if len(data["project_title"]) < 1: -# return ( -# f"redcap project_title is required for redcap access: {data['project_title']}", -# 400, -# ) -# if len(data["project_id"]) < 1: -# return ( -# f"redcap project_id is required for redcap access: {data['project_id']}", -# 400, -# ) -# if len(data["project_api_url"]) < 1: -# return ( -# f"redcap project_api_url is required for redcap access: {data['project_api_url']}", -# 400, -# ) -# if len(data["project_api_key"]) < 1: -# return ( -# f"redcap project_api_key is required for redcap access: {data['project_api_key']}", -# 400, -# ) -# if type(data["project_api_active"]) != bool: -# return ( -# f"redcap project_api_active is required for redcap access: {data['project_api_active']}", -# 400, -# ) - -# update_study_redcap_project_api = model.StudyRedcapProjectApi.query.get(data["project_id"]) -# update_study_redcap_project_api.update(data) -# model.db.session.commit() -# return update_study_redcap_project_api.to_dict() + return delete_redcap_project_api, 204 @api.route("/study//redcap/dashboards") @@ -382,13 +276,15 @@ def get(self, study_id: int, project_id: str, dashboard_id: str): ) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 - # redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.all(study) redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( study=study, study_redcap_project_api=study_redcap_project_api, study_redcap_project_dashboard=study_redcap_project_dashboard, ) - return redcap_project_dashboard.to_dict() + return [ + redcap_project_dashboard.to_dict() + for redcap_project_dashboard in redcap_project_dashboards + ] @api.doc("redcap_project_dashboard") @api.response(200, "Success") @@ -423,81 +319,3 @@ def delete(self, study_id: int): model.db.session.commit() return 204 - - -# @api.route("/study//redcap/") -# class RedcapUpdate(Resource): -# @api.doc("redcap") -# 
@api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_api_model) -# def delete(self, study_id: int, redcap_project_id: str): -# """Delete study redcap metadata""" -# data: Union[Any, dict] = request.json -# if not is_granted("study_metadata", study_id): -# return "Access denied, you can not delete study", 403 -# redcap_project_api = model.StudyRedcapProjectApi.query.get(data["project_id"]) -# model.db.session.delete(redcap_project_api) -# model.db.session.commit() - -# return 204 - -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_model) -# def put(self, study_id: int): -# """Update study redcap""" -# # Schema validation -# schema = { -# "type": "object", -# "additionalProperties": False, -# "required": [ -# "redcap_api_token", -# "redcap_api_url", -# "redcap_project_id", -# "redcap_report_id_survey_completions", -# "redcap_report_id_repeat_surveys", -# "redcap_report_id_participant_values", -# "redcap_report_id_participants", -# ], -# "properties": { -# "redcap_api_token": {"type": string, "minLength": 1}, -# "redcap_api_url": {"type": string, "minLength": 1}, -# "redcap_project_id": {"type": string, "minLength": 1}, -# "redcap_report_id_participants": {"type": string, "minLength": 1}, -# "redcap_report_id_survey_completions": {"type": string}, -# "redcap_report_id_repeat_surveys": {"type": string}, -# "redcap_report_id_participant_values": {"type": string}, -# }, -# } - -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 - -# data: Union[Any, dict] = request.json -# if len(data["redcap_api_url"]) < 1: -# return ( -# f"recap_api_url is required for redcap access: {data['redcap_api_url']}", -# 400, -# ) -# if len(data["redcap_api_token"]) < 1: -# return ( -# f"recap_api_token is required for redcap access: {data['redcap_api_token']}", -# 400, -# ) -# if len(data["redcap_project_id"]) < 1: -# return ( -# 
f"recap_project_id is required for redcap access: {data['redcap_project_id']}", -# 400, -# ) - -# study_obj = model.Study.query.get(study_id) -# if not is_granted("viewer", study_id): -# return "Access denied, you can not modify", 403 -# study = model.Study.query.get(study_id) -# study.study_redcap.update(request.json) -# model.db.session.commit() - -# return study.study_redcap.to_dict() diff --git a/app.py b/app.py index 55ddcb6c..db6dd334 100644 --- a/app.py +++ b/app.py @@ -14,11 +14,9 @@ import config import model -import modules from apis import api from apis.authentication import UnauthenticatedException, authentication, authorization from apis.exception import ValidationException -from caching import create_cache # from pyfairdatatools import __version__ @@ -45,8 +43,6 @@ def create_app(config_module=None): # csrf.init_app(app) app.config.from_prefixed_env("FAIRHUB") - - # print(app.config) if config.FAIRHUB_SECRET: if len(config.FAIRHUB_SECRET) < 32: raise RuntimeError("FAIRHUB_SECRET must be at least 32 characters long") @@ -63,34 +59,12 @@ def create_app(config_module=None): # throw error raise RuntimeError("FAIRHUB_DATABASE_URL not set") - # Update this for - - cache = create_cache(app) - - # for key in app.config: - # if "CACHE" in key: - # print(f"{key}: {app.config[key]}") - # if "CACHE_URL" in app.config: - - # app.config["CACHE_URL"] = app.config["CACHE_URL"] - # app.config["CACHE_HOST"]= app.config["CACHE_HOST"] if "CACHE_HOST" in app.config["CACHE_HOST"] else "localhost" - # app.config["CACHE_PORT"]= app.config["CACHE_PORT"] if "CACHE_PORT" in app.config["CACHE_PORT"] else 6379 - # app.config["CACHE_DB"]= app.config["CACHE_DB"] if "CACHE_DB" in app.config["CACHE_DB"] else 0 - # app.config["CACHE_DEFAULT_TIMEOUT"]= app.config["CACHE_DEFAULT_TIMEOUT"] if "CACHE_DEFAULT_TIMEOUT" in app.config else 86400 - # app.config["CACHE_KEY_PREFIX"]= app.config["CACHE_KEY_PREFIX"] if "CACHE_KEY_PREFIX" in app.config else "fairhub-io#" - - # cache = Cache( - # 
config={ - # "CACHE_TYPE": "RedisCache", - # "CACHE_DEBUG": False, - # "CACHE_DEFAULT_TIMEOUT": app.config["CACHE_DEFAULT_TIMEOUT"], - # "CACHE_KEY_PREFIX": app.config["CACHE_KEY_PREFIX"], - # "CACHE_REDIS_HOST": app.config["CACHE_HOST"], - # "CACHE_REDIS_PORT": app.config["CACHE_PORT"], - # "CACHE_REDIS_DB": app.config["CACHE_DB"], - # "CACHE_REDIS_URL": app.config["CACHE_URL"], - # } - # ) + cache_config = { + key: value + for key, value in app.config.items() + if (len(key) > 5) and (key[0:5] == "CACHE") + } + cache = Cache(config=cache_config) # Moved down here to allow for loading of redis cache prior to API model.db.init_app(app) @@ -106,6 +80,7 @@ def create_app(config_module=None): "/*": { "origins": [ "http://localhost:3000", + "https://localhost:3000", "https:\/\/brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://fairhub.io", ], @@ -140,7 +115,7 @@ def create_schema(): metadata.reflect(bind=engine) table_names = [table.name for table in metadata.tables.values()] if len(table_names) == 0: - with engine.begin() as conn: + with engine.begin(): """Create the database schema.""" model.db.create_all() diff --git a/config.py b/config.py index ac2425a3..c04ea65a 100644 --- a/config.py +++ b/config.py @@ -10,22 +10,18 @@ # Load environment variables from .env config = dotenv_values(".env") -FAIRHUB_DATABASE_URL = environ.get("FAIRHUB_DATABASE_URL") -FAIRHUB_SECRET = environ.get("FAIRHUB_SECRET") -CACHE_DEFAULT_TIMEOUT = environ.get("CACHE_DEFAULT_TIMEOUT") -CACHE_KEY_PREFIX = environ.get("CACHE_KEY_PREFIX") -CACHE_HOST = environ.get("CACHE_HOST") -CACHE_PORT = environ.get("CACHE_PORT") -CACHE_DB = environ.get("CACHE_DB") -CACHE_URL = environ.get("CACHE_URL") - def get_env(key): """Return environment variable from .env or native environment.""" return config.get(key) if LOCAL_ENV_FILE else environ.get(key) - FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") 
FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") - FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") +CACHE_DEFAULT_TIMEOUT = get_env("CACHE_DEFAULT_TIMEOUT") +CACHE_KEY_PREFIX = get_env("CACHE_KEY_PREFIX") +CACHE_HOST = get_env("CACHE_HOST") +CACHE_PORT = get_env("CACHE_PORT") +CACHE_DB = get_env("CACHE_DB") +CACHE_URL = get_env("CACHE_URL") +CACHE_TYPE = get_env("CACHE_TYPE") diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py index ec8cfca5..90c36560 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap_project_api.py @@ -4,7 +4,6 @@ from model import Study from .db import db -from .study import Study class StudyRedcapProjectApi(db.Model): # type: ignore diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index 22565f38..1e2973d9 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -4,10 +4,9 @@ from sqlalchemy import String from sqlalchemy.dialects.postgresql import ARRAY -from model import Study, StudyRedcapProjectApi +from model import Study from .db import db -from .study import Study class StudyRedcapProjectDashboard(db.Model): # type: ignore From dadc624bfdb6c4dbc90779c48970af5d15288a5b Mon Sep 17 00:00:00 2001 From: Greenstick Date: Thu, 2 Nov 2023 13:11:47 -0700 Subject: [PATCH 333/505] =?UTF-8?q?=E2=9C=A8feat:=20hide=20redcap=20api=20?= =?UTF-8?q?token?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap.py | 98 ++++++++++--------------- model/study_redcap_project_api.py | 9 +-- model/study_redcap_project_dashboard.py | 9 ++- 3 files changed, 46 insertions(+), 70 deletions(-) diff --git a/apis/redcap.py b/apis/redcap.py index 8a899139..1d72bdb9 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -11,9 +11,18 @@ api = Namespace("Redcap", description="Redcap 
operations", path="/") +redcap_project_view_model = api.model( + "RedcapProjectAPI", { + "study_id": fields.String(required=True), + "project_title": fields.String(required=True), + "project_id": fields.String(required=True), + "project_api_url": fields.String(required=True), + "project_api_active": fields.Boolean(required=True), + }, +) + redcap_project_api_model = api.model( - "RedcapProjectAPI", - { + "RedcapProjectAPI", { "study_id": fields.String(required=True), "project_title": fields.String(required=True), "project_id": fields.String(required=True), @@ -24,8 +33,7 @@ ) redcap_project_dashboard_model = api.model( - "RedcapProjectDashboard", - { + "RedcapProjectDashboard", { "project_id": fields.String(required=True), "dashboard_id": fields.String( required=True, readonly=True, description="REDCap dashboard ID" @@ -42,50 +50,44 @@ }, ) - @api.route("/study//redcap/all") class RedcapProjectAPIs(Resource): @api.doc("redcap_project_apis") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_api_model, as_list=True) + @api.marshal_with(redcap_project_view_model, as_list=True) def get(self, study_id: int): """List all study REDCap project API links""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 # redcap_project_apis = model.StudyRedcapProjectApi.query.all(study) - redcap_project_apis = model.StudyRedcapProjectApi.query.filter_by(study=study) - return [ - redcap_project_api.to_dict() for redcap_project_api in redcap_project_apis - ] - + redcap_project_views = model.StudyRedcapProjectApi.query.filter_by(study=study) + return [redcap_project_view.to_dict() for redcap_project_view in redcap_project_views] project_parser = reqparse.RequestParser().add_argument( - "project_id", type=str, help="REDCap project ID (pid)" + "project_id", + type=str, + help="REDCap project ID (pid)" ) - - @api.route("/study//redcap") class 
RedcapProjectAPI(Resource): @api.doc(parser=project_parser) @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_api_model) + @api.marshal_with(redcap_project_view_model) def get(self, study_id: int): """Get study REDCap project API link""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 project_id = project_parser.parse_args()["project_id"] - redcap_project_api = model.db.session.query(model.StudyRedcapProjectApi).get( - project_id - ) - return redcap_project_api.to_dict(), 201 + redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get(project_id) + return redcap_project_view.to_dict(), 201 @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_api_model) + @api.marshal_with(redcap_project_view_model) def put(self, study_id: int): """Update study REDCap project API link""" study = model.Study.query.get(study_id) @@ -100,14 +102,12 @@ def put(self, study_id: int): "project_title", "project_id", "project_api_url", - "project_api_key", "project_api_active", ], "properties": { "project_title": {"type": "string", "minLength": 1}, - "project_id": {"type": "string", "minLength": 5}, + "project_id": {"type": "string", "minLength": 1, "maxLength": 12}, "project_api_url": {"type": "string", "minLength": 1}, - "project_api_key": {"type": "string", "minLength": 32}, "project_api_active": {"type": "boolean"}, }, } @@ -131,30 +131,22 @@ def put(self, study_id: int): f"redcap project_api_url is required for redcap access: {data['project_api_url']}", 400, ) - if len(data["project_api_key"]) < 1: - return ( - f"redcap project_api_key is required for redcap access: {data['project_api_key']}", - 400, - ) if type(data["project_api_active"]) is not bool: return ( f"redcap project_api_active is required for redcap access: {data['project_api_active']}", 400, ) - 
update_study_redcap_project_api = model.StudyRedcapProjectApi.query.get( - data["project_id"] - ) - update_study_redcap_project_api.update(data) + update_redcap_project_view = model.StudyRedcapProjectApi.query.get(data["project_id"]) + update_redcap_project_view.update(data) model.db.session.commit() - return update_study_redcap_project_api.to_dict() - + return update_redcap_project_view, 201 @api.route("/study//redcap/add") class AddRedcapProjectAPI(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_api_model) + @api.marshal_with(redcap_project_view_model) def post(self, study_id: int): """Create new study REDCap project API link""" study = model.Study.query.get(study_id) @@ -215,14 +207,13 @@ def post(self, study_id: int): add_redcap_project_api = model.StudyRedcapProjectApi.from_data(study, data) model.db.session.add(add_redcap_project_api) model.db.session.commit() - return add_redcap_project_api.to_dict(), 201 - + return add_redcap_project_api, 201 @api.route("/study//redcap/delete") class DeleteRedcapProjectAPI(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_api_model) + @api.marshal_with(redcap_project_view_model) def post(self, study_id: int): """Delete study REDCap project API link""" study = model.Study.query.get(study_id) @@ -230,12 +221,11 @@ def post(self, study_id: int): return "Access denied, you can not delete study", 403 data: Union[Any, dict] = request.json delete_redcap_project_api = model.StudyRedcapProjectApi.query.filter_by( - project_id=data["project_id"] + project_id = data["project_id"] ).delete() model.db.session.commit() return delete_redcap_project_api, 204 - @api.route("/study//redcap/dashboards") class RedcapProjectDashboards(Resource): """Study Redcap Metadata""" @@ -250,14 +240,8 @@ def get(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 # 
redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.all(study) - redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( - study=study - ) - return [ - redcap_project_dashboard.to_dict() - for redcap_project_dashboard in redcap_project_dashboards - ] - + redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by(study=study) + return [redcap_project_dashboard.to_dict() for redcap_project_dashboard in redcap_project_dashboards] @api.route("/study//redcap/dashboard") class RedcapProjectDashboard(Resource): @@ -271,9 +255,7 @@ def get(self, study_id: int, project_id: str, dashboard_id: str): """Get study redcap""" study = model.Study.query.get(study_id) study_redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) - study_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( - dashboard_id - ) + study_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get(dashboard_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( @@ -281,10 +263,8 @@ def get(self, study_id: int, project_id: str, dashboard_id: str): study_redcap_project_api=study_redcap_project_api, study_redcap_project_dashboard=study_redcap_project_dashboard, ) - return [ - redcap_project_dashboard.to_dict() - for redcap_project_dashboard in redcap_project_dashboards - ] + return [redcap_project_dashboard.to_dict() for redcap_project_dashboard in redcap_project_dashboards] + @api.doc("redcap_project_dashboard") @api.response(200, "Success") @@ -295,9 +275,7 @@ def put(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 data: Union[Any, dict] = request.json - update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( - data["project_id"] - ) + update_redcap_project_dashboard = 
model.StudyRedcapProjectDashboard.query.get(data["project_id"]) update_redcap_project_dashboard.update(data) model.db.session.commit() return update_redcap_project_dashboard.to_dict() @@ -312,9 +290,7 @@ def delete(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not delete study", 403 data: Union[Any, dict] = request.json - redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( - data["project_id"] - ) + redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get(data["project_id"]) model.db.session.delete(redcap_project_dashboard) model.db.session.commit() diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py index 90c36560..106c865f 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap_project_api.py @@ -55,11 +55,10 @@ def from_data(study: Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - self.project_title = data["project_title"] - self.project_id = data["project_id"] - self.project_api_url = data["project_api_url"] - self.project_api_key = data["project_api_key"] - self.project_api_active = data["project_api_active"] + assignable = {key for key in self.to_dict().keys() if key.startswith("project")} + for key, val in data.items(): + if (key in assignable): + setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() def validate(self): diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index 1e2973d9..95f4c3af 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -5,7 +5,6 @@ from sqlalchemy.dialects.postgresql import ARRAY from model import Study - from .db import db @@ -63,11 +62,13 @@ def from_data(study: Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - self.dashboard_id = data["dashboard_id"] - self.dashboard_name = data["dashboard_name"] - 
self.dashboard_endpoint = data["dashboard_endpoint"] + assignable = {key for key in self.__dict__.keys() if key.startswith("dashboard")} + for key, val in data.items(): + if (key in assignable): + setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() + def validate(self): """Validates the study""" violations: list = [] From eb710282eca51897e102962c96c3ee621a5ec686 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Thu, 2 Nov 2023 20:12:24 +0000 Subject: [PATCH 334/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap.py | 63 ++++++++++++++++------ caching/cache.py | 33 +++++++++--- config.py | 2 + model/study_redcap_project_api.py | 2 +- model/study_redcap_project_dashboard.py | 7 +-- modules/etl/transforms/redcap_transform.py | 1 + 6 files changed, 80 insertions(+), 28 deletions(-) diff --git a/apis/redcap.py b/apis/redcap.py index 1d72bdb9..db4e002a 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -12,7 +12,8 @@ api = Namespace("Redcap", description="Redcap operations", path="/") redcap_project_view_model = api.model( - "RedcapProjectAPI", { + "RedcapProjectAPI", + { "study_id": fields.String(required=True), "project_title": fields.String(required=True), "project_id": fields.String(required=True), @@ -22,7 +23,8 @@ ) redcap_project_api_model = api.model( - "RedcapProjectAPI", { + "RedcapProjectAPI", + { "study_id": fields.String(required=True), "project_title": fields.String(required=True), "project_id": fields.String(required=True), @@ -33,7 +35,8 @@ ) redcap_project_dashboard_model = api.model( - "RedcapProjectDashboard", { + "RedcapProjectDashboard", + { "project_id": fields.String(required=True), "dashboard_id": fields.String( required=True, readonly=True, description="REDCap dashboard ID" @@ -50,6 +53,7 @@ }, ) + @api.route("/study//redcap/all") class RedcapProjectAPIs(Resource): 
@api.doc("redcap_project_apis") @@ -63,13 +67,17 @@ def get(self, study_id: int): return "Access denied, you can not modify", 403 # redcap_project_apis = model.StudyRedcapProjectApi.query.all(study) redcap_project_views = model.StudyRedcapProjectApi.query.filter_by(study=study) - return [redcap_project_view.to_dict() for redcap_project_view in redcap_project_views] + return [ + redcap_project_view.to_dict() + for redcap_project_view in redcap_project_views + ] + project_parser = reqparse.RequestParser().add_argument( - "project_id", - type=str, - help="REDCap project ID (pid)" + "project_id", type=str, help="REDCap project ID (pid)" ) + + @api.route("/study//redcap") class RedcapProjectAPI(Resource): @api.doc(parser=project_parser) @@ -82,7 +90,9 @@ def get(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 project_id = project_parser.parse_args()["project_id"] - redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get(project_id) + redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get( + project_id + ) return redcap_project_view.to_dict(), 201 @api.response(200, "Success") @@ -137,11 +147,14 @@ def put(self, study_id: int): 400, ) - update_redcap_project_view = model.StudyRedcapProjectApi.query.get(data["project_id"]) + update_redcap_project_view = model.StudyRedcapProjectApi.query.get( + data["project_id"] + ) update_redcap_project_view.update(data) model.db.session.commit() return update_redcap_project_view, 201 + @api.route("/study//redcap/add") class AddRedcapProjectAPI(Resource): @api.response(200, "Success") @@ -209,6 +222,7 @@ def post(self, study_id: int): model.db.session.commit() return add_redcap_project_api, 201 + @api.route("/study//redcap/delete") class DeleteRedcapProjectAPI(Resource): @api.response(200, "Success") @@ -221,11 +235,12 @@ def post(self, study_id: int): return "Access denied, you can not delete study", 403 data: Union[Any, dict] = 
request.json delete_redcap_project_api = model.StudyRedcapProjectApi.query.filter_by( - project_id = data["project_id"] + project_id=data["project_id"] ).delete() model.db.session.commit() return delete_redcap_project_api, 204 + @api.route("/study//redcap/dashboards") class RedcapProjectDashboards(Resource): """Study Redcap Metadata""" @@ -240,8 +255,14 @@ def get(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 # redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.all(study) - redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by(study=study) - return [redcap_project_dashboard.to_dict() for redcap_project_dashboard in redcap_project_dashboards] + redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( + study=study + ) + return [ + redcap_project_dashboard.to_dict() + for redcap_project_dashboard in redcap_project_dashboards + ] + @api.route("/study//redcap/dashboard") class RedcapProjectDashboard(Resource): @@ -255,7 +276,9 @@ def get(self, study_id: int, project_id: str, dashboard_id: str): """Get study redcap""" study = model.Study.query.get(study_id) study_redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) - study_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get(dashboard_id) + study_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( + dashboard_id + ) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( @@ -263,8 +286,10 @@ def get(self, study_id: int, project_id: str, dashboard_id: str): study_redcap_project_api=study_redcap_project_api, study_redcap_project_dashboard=study_redcap_project_dashboard, ) - return [redcap_project_dashboard.to_dict() for redcap_project_dashboard in redcap_project_dashboards] - + return [ + redcap_project_dashboard.to_dict() + for 
redcap_project_dashboard in redcap_project_dashboards + ] @api.doc("redcap_project_dashboard") @api.response(200, "Success") @@ -275,7 +300,9 @@ def put(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 data: Union[Any, dict] = request.json - update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get(data["project_id"]) + update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( + data["project_id"] + ) update_redcap_project_dashboard.update(data) model.db.session.commit() return update_redcap_project_dashboard.to_dict() @@ -290,7 +317,9 @@ def delete(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not delete study", 403 data: Union[Any, dict] = request.json - redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get(data["project_id"]) + redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( + data["project_id"] + ) model.db.session.delete(redcap_project_dashboard) model.db.session.commit() diff --git a/caching/cache.py b/caching/cache.py index 1714d17d..1613c62a 100644 --- a/caching/cache.py +++ b/caching/cache.py @@ -1,14 +1,33 @@ from flask_caching import Cache -def create_cache (app): + +def create_cache(app): cache = None try: - app.config["CACHE_URL"] = app.config["CACHE_URL"] if "CACHE_URL" in app.config else "redis://127.0.0.1:6379" - app.config["CACHE_HOST"]= app.config["CACHE_HOST"] if "CACHE_HOST" in app.config else "localhost" - app.config["CACHE_PORT"]= app.config["CACHE_PORT"] if "CACHE_PORT" in app.config else 6379 - app.config["CACHE_DB"]= app.config["CACHE_DB"] if "CACHE_DB" in app.config else 0 - app.config["CACHE_DEFAULT_TIMEOUT"]= app.config["CACHE_DEFAULT_TIMEOUT"] if "CACHE_DEFAULT_TIMEOUT" in app.config else 86400 - app.config["CACHE_KEY_PREFIX"]= app.config["CACHE_KEY_PREFIX"] if "CACHE_KEY_PREFIX" in app.config else "fairhub-io#" + app.config["CACHE_URL"] = ( + 
app.config["CACHE_URL"] + if "CACHE_URL" in app.config + else "redis://127.0.0.1:6379" + ) + app.config["CACHE_HOST"] = ( + app.config["CACHE_HOST"] if "CACHE_HOST" in app.config else "localhost" + ) + app.config["CACHE_PORT"] = ( + app.config["CACHE_PORT"] if "CACHE_PORT" in app.config else 6379 + ) + app.config["CACHE_DB"] = ( + app.config["CACHE_DB"] if "CACHE_DB" in app.config else 0 + ) + app.config["CACHE_DEFAULT_TIMEOUT"] = ( + app.config["CACHE_DEFAULT_TIMEOUT"] + if "CACHE_DEFAULT_TIMEOUT" in app.config + else 86400 + ) + app.config["CACHE_KEY_PREFIX"] = ( + app.config["CACHE_KEY_PREFIX"] + if "CACHE_KEY_PREFIX" in app.config + else "fairhub-io#" + ) cache = Cache( config={ diff --git a/config.py b/config.py index c04ea65a..16e9416d 100644 --- a/config.py +++ b/config.py @@ -10,10 +10,12 @@ # Load environment variables from .env config = dotenv_values(".env") + def get_env(key): """Return environment variable from .env or native environment.""" return config.get(key) if LOCAL_ENV_FILE else environ.get(key) + FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py index 106c865f..49dbeecb 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap_project_api.py @@ -57,7 +57,7 @@ def update(self, data: dict): """Updates the study from a dictionary""" assignable = {key for key in self.to_dict().keys() if key.startswith("project")} for key, val in data.items(): - if (key in assignable): + if key in assignable: setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index 95f4c3af..111d8740 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -62,13 +62,14 @@ def from_data(study: Study, 
data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - assignable = {key for key in self.__dict__.keys() if key.startswith("dashboard")} + assignable = { + key for key in self.__dict__.keys() if key.startswith("dashboard") + } for key, val in data.items(): - if (key in assignable): + if key in assignable: setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() - def validate(self): """Validates the study""" violations: list = [] diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index a2073f77..6e6feeaf 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -7,6 +7,7 @@ import pandas as pd import numpy as np + class RedcapTransform(object): def __init__(self, config: dict) -> None: # From eceaa8f9ec719e47b5969973dda5bf556d850cc4 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Thu, 2 Nov 2023 13:22:32 -0700 Subject: [PATCH 335/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20return=20default?= =?UTF-8?q?=20port=20to=205000?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index db6dd334..b0feef30 100644 --- a/app.py +++ b/app.py @@ -237,7 +237,7 @@ def destroy_schema(): parser = ArgumentParser() parser.add_argument( - "-p", "--port", default=3001, type=int, help="port to listen on" + "-p", "--port", default=5000, type=int, help="port to listen on" ) args = parser.parse_args() port = args.port From 5357ec042e7a8b8d6cbf9682d2b14adfcb67c97e Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 19:12:28 -0700 Subject: [PATCH 336/505] =?UTF-8?q?=F0=9F=9A=A8=20fix:=20fix=20formatting?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap_data/__init__.py | 18 +++++++++--------- model/study_redcap_project_dashboard.py | 1 + 2 
files changed, 10 insertions(+), 9 deletions(-) diff --git a/apis/redcap_data/__init__.py b/apis/redcap_data/__init__.py index 4f12cfc0..8b85fd76 100644 --- a/apis/redcap_data/__init__.py +++ b/apis/redcap_data/__init__.py @@ -1,9 +1,9 @@ -from .redcap_project_data import RedcapProjectDataResource -from .redcap_report_participant_values_data import ( - RedcapReportParticipantValuesDataResource, -) -from .redcap_report_participants_data import RedcapReportParticipantsDataResource -from .redcap_report_repeat_surveys_data import RedcapReportRepeatSurveysDataResource -from .redcap_report_survey_completions_data import ( - RedcapReportSurveyCompletionsDataResource, -) +# from .redcap_project_data import RedcapProjectDataResource +# from .redcap_report_participant_values_data import ( +# RedcapReportParticipantValuesDataResource, +# ) +# from .redcap_report_participants_data import RedcapReportParticipantsDataResource +# from .redcap_report_repeat_surveys_data import RedcapReportRepeatSurveysDataResource +# from .redcap_report_survey_completions_data import ( +# RedcapReportSurveyCompletionsDataResource, +# ) diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index 111d8740..c52d04d1 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -5,6 +5,7 @@ from sqlalchemy.dialects.postgresql import ARRAY from model import Study + from .db import db From a6e1962448c3170fcccc6216208b99e8a47c2e55 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 19:15:21 -0700 Subject: [PATCH 337/505] =?UTF-8?q?=F0=9F=9A=A8=20fix:=20fix=20flake=20err?= =?UTF-8?q?ors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap_data/redcap_project_data.py | 12 +++++------- .../redcap_report_participant_values_data.py | 9 ++++----- apis/redcap_data/redcap_report_participants_data.py | 8 ++++---- 
.../redcap_data/redcap_report_repeat_surveys_data.py | 8 ++++---- .../redcap_report_survey_completions_data.py | 8 ++++---- 5 files changed, 21 insertions(+), 24 deletions(-) diff --git a/apis/redcap_data/redcap_project_data.py b/apis/redcap_data/redcap_project_data.py index f2cec382..babebcb7 100644 --- a/apis/redcap_data/redcap_project_data.py +++ b/apis/redcap_data/redcap_project_data.py @@ -1,14 +1,14 @@ """API routes for redcap project""" -import typing +# import typing -from flask import request +# from flask import request from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +# from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -from ..authentication import is_granted +# from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig @@ -87,7 +87,6 @@ class RedcapProjectDataResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_data) - # @cache.cached() def get(self, study_id: int, redcap_project_id: str): """ Get REDCap project @@ -101,5 +100,4 @@ def get(self, study_id: int, redcap_project_id: str): PyCapProject = Project( study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] ) - project = PyCapProject.export_project_info() - return project + return PyCapProject.export_project_info() diff --git a/apis/redcap_data/redcap_report_participant_values_data.py b/apis/redcap_data/redcap_report_participant_values_data.py index a69cca2e..06e27c1a 100644 --- a/apis/redcap_data/redcap_report_participant_values_data.py +++ b/apis/redcap_data/redcap_report_participant_values_data.py @@ -1,14 +1,14 @@ """API routes for redcap report participant values data""" -import typing +# import typing -from flask import request +# from flask import request from flask_restx import Resource, fields -from jsonschema import 
ValidationError, validate +# from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -from ..authentication import is_granted +# from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig @@ -37,7 +37,6 @@ "dm": fields.String( required=True, readonly=True, description="Data approved for Fairhub.io" ), - "siteid": fields.String(required=True, readonly=True, description="Site ID"), "genderid": fields.String( required=True, readonly=True, description="Gender identity" ), diff --git a/apis/redcap_data/redcap_report_participants_data.py b/apis/redcap_data/redcap_report_participants_data.py index eed60162..b20df8cf 100644 --- a/apis/redcap_data/redcap_report_participants_data.py +++ b/apis/redcap_data/redcap_report_participants_data.py @@ -1,14 +1,14 @@ """API routes for redcap report participants data data""" -import typing +# import typing -from flask import request +# from flask import request from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +# from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -from ..authentication import is_granted +# from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig diff --git a/apis/redcap_data/redcap_report_repeat_surveys_data.py b/apis/redcap_data/redcap_report_repeat_surveys_data.py index 79bd9c29..6c94b6c2 100644 --- a/apis/redcap_data/redcap_report_repeat_surveys_data.py +++ b/apis/redcap_data/redcap_report_repeat_surveys_data.py @@ -1,14 +1,14 @@ """API routes for redcap report repeat surveys data""" -import typing +# import typing -from flask import request +# from flask import request from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +# from jsonschema import ValidationError, 
validate import model from apis.redcap_data_namespace import api -from ..authentication import is_granted +# from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig diff --git a/apis/redcap_data/redcap_report_survey_completions_data.py b/apis/redcap_data/redcap_report_survey_completions_data.py index 318ba229..f6994cc9 100644 --- a/apis/redcap_data/redcap_report_survey_completions_data.py +++ b/apis/redcap_data/redcap_report_survey_completions_data.py @@ -1,14 +1,14 @@ """API routes for redcap report survey completions data""" -import typing +# import typing -from flask import request +# from flask import request from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +# from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -from ..authentication import is_granted +# from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig From dc24de63178aaee6122e89949a94b4e04ace186e Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sat, 4 Nov 2023 02:15:44 +0000 Subject: [PATCH 338/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap_data/redcap_project_data.py | 1 + apis/redcap_data/redcap_report_participant_values_data.py | 1 + apis/redcap_data/redcap_report_participants_data.py | 1 + apis/redcap_data/redcap_report_repeat_surveys_data.py | 1 + apis/redcap_data/redcap_report_survey_completions_data.py | 1 + 5 files changed, 5 insertions(+) diff --git a/apis/redcap_data/redcap_project_data.py b/apis/redcap_data/redcap_project_data.py index babebcb7..bd698330 100644 --- a/apis/redcap_data/redcap_project_data.py +++ b/apis/redcap_data/redcap_project_data.py @@ -3,6 +3,7 @@ # from 
flask import request from flask_restx import Resource, fields + # from jsonschema import ValidationError, validate import model diff --git a/apis/redcap_data/redcap_report_participant_values_data.py b/apis/redcap_data/redcap_report_participant_values_data.py index 06e27c1a..fae04810 100644 --- a/apis/redcap_data/redcap_report_participant_values_data.py +++ b/apis/redcap_data/redcap_report_participant_values_data.py @@ -3,6 +3,7 @@ # from flask import request from flask_restx import Resource, fields + # from jsonschema import ValidationError, validate import model diff --git a/apis/redcap_data/redcap_report_participants_data.py b/apis/redcap_data/redcap_report_participants_data.py index b20df8cf..0ca75569 100644 --- a/apis/redcap_data/redcap_report_participants_data.py +++ b/apis/redcap_data/redcap_report_participants_data.py @@ -3,6 +3,7 @@ # from flask import request from flask_restx import Resource, fields + # from jsonschema import ValidationError, validate import model diff --git a/apis/redcap_data/redcap_report_repeat_surveys_data.py b/apis/redcap_data/redcap_report_repeat_surveys_data.py index 6c94b6c2..b0713226 100644 --- a/apis/redcap_data/redcap_report_repeat_surveys_data.py +++ b/apis/redcap_data/redcap_report_repeat_surveys_data.py @@ -3,6 +3,7 @@ # from flask import request from flask_restx import Resource, fields + # from jsonschema import ValidationError, validate import model diff --git a/apis/redcap_data/redcap_report_survey_completions_data.py b/apis/redcap_data/redcap_report_survey_completions_data.py index f6994cc9..b1cff193 100644 --- a/apis/redcap_data/redcap_report_survey_completions_data.py +++ b/apis/redcap_data/redcap_report_survey_completions_data.py @@ -3,6 +3,7 @@ # from flask import request from flask_restx import Resource, fields + # from jsonschema import ValidationError, validate import model From 73ee3b1fdfa4df8038194a63aa16d11a3abf1505 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 19:17:17 -0700 Subject: 
[PATCH 339/505] =?UTF-8?q?=F0=9F=9A=A8=20fix:=20fix=20mypy=20erro?= =?UTF-8?q?rs?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap.py | 4 ++++ pyproject.toml | 11 +++-------- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/apis/redcap.py b/apis/redcap.py index db4e002a..57e94cae 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -93,6 +93,10 @@ def get(self, study_id: int): redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get( project_id ) + + if redcap_project_view is None: + return {"error": "Project not found"}, 404 + return redcap_project_view.to_dict(), 201 @api.response(200, "Success") diff --git a/pyproject.toml b/pyproject.toml index 5575bd36..cbb04e42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,9 +13,7 @@ documentation = "https://pyfairdatatools.readthedocs.io" repository = "https://github.com/AI-READI/pyfairdatatools" - -keywords = [ -] +keywords = [] classifiers = [ # TODO: update this list to match your application: https://pypi.org/pypi?%3Aaction=list_classifiers "Development Status :: 1 - Planning", @@ -123,7 +121,7 @@ lint = ["flake8", "typecheck", "pylint"] precommit = ["format", "flake8", "typecheck", "pylint"] -test = "pytest -rx -W ignore::DeprecationWarning" +test = "pytest -rx -W ignore::DeprecationWarning" test_with_capture = "pytest -s -W ignore::DeprecationWarning" jupyter = "jupyter notebook" @@ -160,10 +158,7 @@ addopts = """ --no-cov-on-fail """ -filterwarnings = [ - "ignore", - "default:::flask_restx.*", -] +filterwarnings = ["ignore", "default:::flask_restx.*"] cache_dir = ".cache/pytest/" From 4b836fb01a3956b354b3fd71797a3ae422d2a4ea Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sat, 4 Nov 2023 02:17:43 +0000 Subject: [PATCH 340/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- 
apis/redcap.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/apis/redcap.py b/apis/redcap.py index 57e94cae..d12f3a72 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -93,10 +93,10 @@ def get(self, study_id: int): redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get( project_id ) - + if redcap_project_view is None: return {"error": "Project not found"}, 404 - + return redcap_project_view.to_dict(), 201 @api.response(200, "Success") From 6cc07888f6a460e63e647ea36806ae6c989ab2ec Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 19:32:39 -0700 Subject: [PATCH 341/505] =?UTF-8?q?=F0=9F=9A=A8=20fix:=20fix=20pylint=20er?= =?UTF-8?q?rors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/redcap.py | 36 ++++++++++++------- apis/redcap_data/redcap_project_data.py | 6 +++- .../redcap_report_participant_values_data.py | 6 ++-- .../redcap_report_participants_data.py | 4 ++- .../redcap_report_repeat_surveys_data.py | 4 ++- .../redcap_report_survey_completions_data.py | 4 ++- app.py | 2 +- model/study_redcap_project_api.py | 6 +++- model/study_redcap_project_dashboard.py | 4 ++- 9 files changed, 50 insertions(+), 22 deletions(-) diff --git a/apis/redcap.py b/apis/redcap.py index 57e94cae..0a52a4ad 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -56,6 +56,8 @@ @api.route("/study//redcap/all") class RedcapProjectAPIs(Resource): + """Study Redcap Metadata""" + @api.doc("redcap_project_apis") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -80,6 +82,8 @@ def get(self, study_id: int): @api.route("/study//redcap") class RedcapProjectAPI(Resource): + """Study Redcap Metadata""" + @api.doc(parser=project_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @@ -93,10 +97,10 @@ def get(self, study_id: int): redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get( project_id ) - + if 
redcap_project_view is None: return {"error": "Project not found"}, 404 - + return redcap_project_view.to_dict(), 201 @api.response(200, "Success") @@ -132,22 +136,23 @@ def put(self, study_id: int): if len(data["project_title"]) < 1: return ( - f"redcap project_title is required for redcap access: {data['project_title']}", + f"redcap project_title is required for redcap access: {data['project_title']}", # noqa E501 # pylint: disable=line-too-long 400, ) if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap access: {data['project_id']}", + f"redcap project_id is required for redcap access: {data['project_id']}", # noqa E501 # pylint: disable=line-too-long 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: {data['project_api_url']}", + f"redcap project_api_url is required for redcap access: {data['project_api_url']}", # noqa E501 # pylint: disable=line-too-long 400, ) - if type(data["project_api_active"]) is not bool: + + if isinstance(data["project_api_active"], bool) is False: return ( - f"redcap project_api_active is required for redcap access: {data['project_api_active']}", + f"redcap project_api_active is required for redcap access: {data['project_api_active']}", # noqa E501 # pylint: disable=line-too-long 400, ) @@ -161,6 +166,8 @@ def put(self, study_id: int): @api.route("/study//redcap/add") class AddRedcapProjectAPI(Resource): + """Study Redcap Metadata""" + @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) @@ -197,27 +204,27 @@ def post(self, study_id: int): if len(data["project_title"]) < 1: return ( - f"redcap project_title is required for redcap access: {data['project_title']}", + f"redcap project_title is required for redcap access: {data['project_title']}", # noqa E501 # pylint: disable=line-too-long 400, ) if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap 
access: {data['project_id']}", + f"redcap project_id is required for redcap access: {data['project_id']}", # noqa E501 # pylint: disable=line-too-long 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: {data['project_api_url']}", + f"redcap project_api_url is required for redcap access: {data['project_api_url']}", # noqa E501 # pylint: disable=line-too-long 400, ) if len(data["project_api_key"]) < 1: return ( - f"redcap project_api_key is required for redcap access: {data['project_api_key']}", + f"redcap project_api_key is required for redcap access: {data['project_api_key']}", # noqa E501 # pylint: disable=line-too-long 400, ) - if type(data["project_api_active"]) is not bool: + if isinstance(data["project_api_active"], bool) is False: return ( - f"redcap project_api_active is required for redcap access: {data['project_api_active']}", + f"redcap project_api_active is required for redcap access: {data['project_api_active']}", # noqa E501 # pylint: disable=line-too-long 400, ) @@ -229,6 +236,8 @@ def post(self, study_id: int): @api.route("/study//redcap/delete") class DeleteRedcapProjectAPI(Resource): + """Study Redcap Metadata""" + @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) @@ -300,6 +309,7 @@ def get(self, study_id: int, project_id: str, dashboard_id: str): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) def put(self, study_id: int): + """Update study redcap""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 diff --git a/apis/redcap_data/redcap_project_data.py b/apis/redcap_data/redcap_project_data.py index bd698330..b49e2d51 100644 --- a/apis/redcap_data/redcap_project_data.py +++ b/apis/redcap_data/redcap_project_data.py @@ -84,11 +84,15 @@ @api.route("/study//redcap//project") class 
RedcapProjectDataResource(Resource): + """RedcapProjectDataResource""" + @api.doc("project") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_data) - def get(self, study_id: int, redcap_project_id: str): + def get( + self, study_id: int, redcap_project_id: str + ): # pylint: disable=unused-argument """ Get REDCap project diff --git a/apis/redcap_data/redcap_report_participant_values_data.py b/apis/redcap_data/redcap_report_participant_values_data.py index fae04810..7240e5e9 100644 --- a/apis/redcap_data/redcap_report_participant_values_data.py +++ b/apis/redcap_data/redcap_report_participant_values_data.py @@ -96,7 +96,7 @@ "dvrtnyn": fields.String( required=True, readonly=True, - description="Was the participant given device return instructions and shipping materials?", + description="Was the participant given device return instructions and shipping materials?", # noqa: E501 # pylint: disable=line-too-long ), "dvrtnship": fields.String( required=True, readonly=True, description="Return shipping tracking number" @@ -152,7 +152,9 @@ class RedcapReportParticipantValuesDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_participant_values_data) # @IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): + def get( + self, study_id: int, redcap_project_id: str + ): # pylint: disable=unused-argument study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/apis/redcap_data/redcap_report_participants_data.py b/apis/redcap_data/redcap_report_participants_data.py index 0ca75569..7f562bba 100644 --- a/apis/redcap_data/redcap_report_participants_data.py +++ b/apis/redcap_data/redcap_report_participants_data.py @@ -54,7 +54,9 @@ class RedcapReportParticipantsDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_participants_data) # 
@IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): + def get( + self, study_id: int, redcap_project_id: str + ): # pylint: disable=unused-argument study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/apis/redcap_data/redcap_report_repeat_surveys_data.py b/apis/redcap_data/redcap_report_repeat_surveys_data.py index b0713226..f25656bb 100644 --- a/apis/redcap_data/redcap_report_repeat_surveys_data.py +++ b/apis/redcap_data/redcap_report_repeat_surveys_data.py @@ -60,7 +60,9 @@ class RedcapReportRepeatSurveysDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_repeat_surveys_data) # @IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): + def get( + self, study_id: int, redcap_project_id: str + ): # pylint: disable=unused-argument study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/apis/redcap_data/redcap_report_survey_completions_data.py b/apis/redcap_data/redcap_report_survey_completions_data.py index b1cff193..e1a8fb2f 100644 --- a/apis/redcap_data/redcap_report_survey_completions_data.py +++ b/apis/redcap_data/redcap_report_survey_completions_data.py @@ -172,7 +172,9 @@ class RedcapReportSurveyCompletionsDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_survey_completions_data) # @IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): + def get( + self, study_id: int, redcap_project_id: str + ): # pylint: disable=unused-argument study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/app.py b/app.py index b0feef30..c1f15521 100644 --- a/app.py +++ b/app.py @@ -116,7 +116,7 @@ def create_schema(): table_names = [table.name for table in metadata.tables.values()] if len(table_names) == 0: 
with engine.begin(): - """Create the database schema.""" + # Create the database schema model.db.create_all() @app.before_request diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py index 49dbeecb..7f94b6d7 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap_project_api.py @@ -55,7 +55,11 @@ def from_data(study: Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - assignable = {key for key in self.to_dict().keys() if key.startswith("project")} + assignable = { + key + for key in self.to_dict().keys() # pylint: disable=consider-iterating-dictionary + if key.startswith("project") + } for key, val in data.items(): if key in assignable: setattr(self, key, val) diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index c52d04d1..c3963bb1 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -64,7 +64,9 @@ def from_data(study: Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" assignable = { - key for key in self.__dict__.keys() if key.startswith("dashboard") + key + for key in self.__dict__.keys() # pylint: disable=consider-iterating-dictionary + if key.startswith("dashboard") } for key, val in data.items(): if key in assignable: From eb2962a2864717ce7a25785fa5f5a5d6ca78a905 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 19:45:14 -0700 Subject: [PATCH 342/505] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20chore:=20update=20?= =?UTF-8?q?to=20python=203.10?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/deploy-app-to-main-slot.yml | 2 +- .../workflows/deploy-app-to-staging-slot.yml | 2 +- .github/workflows/lint.yml | 2 +- .github/workflows/test.yml | 2 +- Dockerfile | 2 +- README.md | 5 +- poetry.lock | 181 ++++++++---------- pyproject.toml | 2 +- 8 files changed, 94 
insertions(+), 104 deletions(-) diff --git a/.github/workflows/deploy-app-to-main-slot.yml b/.github/workflows/deploy-app-to-main-slot.yml index 1ae12f5c..66d0a63b 100644 --- a/.github/workflows/deploy-app-to-main-slot.yml +++ b/.github/workflows/deploy-app-to-main-slot.yml @@ -24,7 +24,7 @@ jobs: - name: Set up Python version uses: actions/setup-python@v1 with: - python-version: "3.8" + python-version: "3.10" - name: Create and start virtual environment run: | diff --git a/.github/workflows/deploy-app-to-staging-slot.yml b/.github/workflows/deploy-app-to-staging-slot.yml index 65ba15ef..9b0eaf20 100644 --- a/.github/workflows/deploy-app-to-staging-slot.yml +++ b/.github/workflows/deploy-app-to-staging-slot.yml @@ -24,7 +24,7 @@ jobs: - name: Set up Python version uses: actions/setup-python@v1 with: - python-version: "3.8" + python-version: "3.10" - name: Create and start virtual environment run: | diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 30adb21d..44975a75 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8"] + python-version: ["3.10"] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1f9a28ad..3d0b6984 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8"] + python-version: ["3.10"] env: # These are simulated secrets for test workflow only. 
FAIRHUB_DATABASE_URL: postgresql://admin:root@localhost:5432/fairhub_local diff --git a/Dockerfile b/Dockerfile index 24060626..8b5c0b9e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.8-alpine +FROM python:3.10-alpine EXPOSE 5000 diff --git a/README.md b/README.md index 96ab1847..8d9346c1 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ You will need the following installed on your system: -- Python 3.8+ +- Python 3.10+ - [Pip](https://pip.pypa.io/en/stable/) - [Poetry](https://python-poetry.org/) - [Docker](https://www.docker.com/) @@ -25,7 +25,7 @@ If you would like to update the api, please follow the instructions below. If you are using Anaconda, you can create a virtual environment with: ```bash - conda create -n fairhub-api-dev-env python=3.8 + conda create -n fairhub-api-dev-env python=3.10 conda activate fairhub-api-dev-env ``` @@ -104,6 +104,7 @@ This database will not persist data between runs. ### Caching The api uses a redis cache. You can run a redis cache locally using docker, too: + ```bash docker-compose -f ./cache-docker-compose.yaml up ``` diff --git a/poetry.lock b/poetry.lock index b6a3420b..7e99f546 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1620,14 +1620,14 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", [[package]] name = "jupyterlab" -version = "4.0.7" +version = "4.0.8" description = "JupyterLab computational environment" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.7-py3-none-any.whl", hash = "sha256:08683045117cc495531fdb39c22ababb9aaac6977a45e67cfad20046564c9c7c"}, - {file = "jupyterlab-4.0.7.tar.gz", hash = "sha256:48792efd9f962b2bcda1f87d72168ff122c288b1d97d32109e4a11b33dc862be"}, + {file = "jupyterlab-4.0.8-py3-none-any.whl", hash = "sha256:2ff5aa2a51eb21df241d6011c236e88bd1ff9a5dbb75bebc54472f9c18bfffa4"}, + {file = "jupyterlab-4.0.8.tar.gz", hash = 
"sha256:c4fe93f977bcc987bd395d7fae5ab02e0c042bf4e0f7c95196f3e2e578c2fb3a"}, ] [package.dependencies] @@ -1645,7 +1645,7 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.7.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.286)"] +dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.292)"] docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] @@ -1789,6 +1789,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2431,6 +2441,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = 
"psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -2469,14 +2481,14 @@ tests = ["pytest"] [[package]] name = "pycap" -version = "2.5.0" +version = "2.6.0" description = "PyCap: Python interface to REDCap" category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "pycap-2.5.0-py3-none-any.whl", hash = "sha256:f483e0c8405f4dc904932d79d4d6076bd508a29e0c1e4636443316844109e9b5"}, - {file = "pycap-2.5.0.tar.gz", hash = "sha256:3c61e5cab844e4dec1be6318eac2fe639161221cad62a0fa5526c573e07cc406"}, + {file = "pycap-2.6.0-py3-none-any.whl", hash = "sha256:404a7ba299fa57f0fcadd9f4b6df123e593deda1dcb12b341f39b416b6e83d6b"}, + {file = "pycap-2.6.0.tar.gz", hash = "sha256:68d7403bf573b03ae24cb252fb1e5f73fe365b6c9d54c46199014edaffcc8f94"}, ] [package.dependencies] @@ -2876,6 +2888,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2883,8 +2896,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2901,6 +2921,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2908,6 +2929,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3247,61 +3269,61 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.22" +version = "2.0.23" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f146c61ae128ab43ea3a0955de1af7e1633942c2b2b4985ac51cc292daf33222"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:875de9414393e778b655a3d97d60465eb3fae7c919e88b70cc10b40b9f56042d"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13790cb42f917c45c9c850b39b9941539ca8ee7917dacf099cc0b569f3d40da7"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e04ab55cf49daf1aeb8c622c54d23fa4bec91cb051a43cc24351ba97e1dd09f5"}, - {file = 
"SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a42c9fa3abcda0dcfad053e49c4f752eef71ecd8c155221e18b99d4224621176"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:14cd3bcbb853379fef2cd01e7c64a5d6f1d005406d877ed9509afb7a05ff40a5"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-win32.whl", hash = "sha256:d143c5a9dada696bcfdb96ba2de4a47d5a89168e71d05a076e88a01386872f97"}, - {file = "SQLAlchemy-2.0.22-cp310-cp310-win_amd64.whl", hash = "sha256:ccd87c25e4c8559e1b918d46b4fa90b37f459c9b4566f1dfbce0eb8122571547"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f6ff392b27a743c1ad346d215655503cec64405d3b694228b3454878bf21590"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f776c2c30f0e5f4db45c3ee11a5f2a8d9de68e81eb73ec4237de1e32e04ae81c"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8f1792d20d2f4e875ce7a113f43c3561ad12b34ff796b84002a256f37ce9437"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80eeb5189d7d4b1af519fc3f148fe7521b9dfce8f4d6a0820e8f5769b005051"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69fd9e41cf9368afa034e1c81f3570afb96f30fcd2eb1ef29cb4d9371c6eece2"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54bcceaf4eebef07dadfde424f5c26b491e4a64e61761dea9459103ecd6ccc95"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-win32.whl", hash = "sha256:7ee7ccf47aa503033b6afd57efbac6b9e05180f492aeed9fcf70752556f95624"}, - {file = "SQLAlchemy-2.0.22-cp311-cp311-win_amd64.whl", hash = "sha256:b560f075c151900587ade06706b0c51d04b3277c111151997ea0813455378ae0"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2c9bac865ee06d27a1533471405ad240a6f5d83195eca481f9fc4a71d8b87df8"}, - {file = 
"SQLAlchemy-2.0.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:625b72d77ac8ac23da3b1622e2da88c4aedaee14df47c8432bf8f6495e655de2"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39a6e21110204a8c08d40ff56a73ba542ec60bab701c36ce721e7990df49fb9"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53a766cb0b468223cafdf63e2d37f14a4757476157927b09300c8c5832d88560"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0e1ce8ebd2e040357dde01a3fb7d30d9b5736b3e54a94002641dfd0aa12ae6ce"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:505f503763a767556fa4deae5194b2be056b64ecca72ac65224381a0acab7ebe"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-win32.whl", hash = "sha256:154a32f3c7b00de3d090bc60ec8006a78149e221f1182e3edcf0376016be9396"}, - {file = "SQLAlchemy-2.0.22-cp312-cp312-win_amd64.whl", hash = "sha256:129415f89744b05741c6f0b04a84525f37fbabe5dc3774f7edf100e7458c48cd"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3940677d341f2b685a999bffe7078697b5848a40b5f6952794ffcf3af150c301"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55914d45a631b81a8a2cb1a54f03eea265cf1783241ac55396ec6d735be14883"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2096d6b018d242a2bcc9e451618166f860bb0304f590d205173d317b69986c95"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:19c6986cf2fb4bc8e0e846f97f4135a8e753b57d2aaaa87c50f9acbe606bd1db"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ac28bd6888fe3c81fbe97584eb0b96804bd7032d6100b9701255d9441373ec1"}, - {file = "SQLAlchemy-2.0.22-cp37-cp37m-win32.whl", hash = "sha256:cb9a758ad973e795267da334a92dd82bb7555cb36a0960dcabcf724d26299db8"}, - 
{file = "SQLAlchemy-2.0.22-cp37-cp37m-win_amd64.whl", hash = "sha256:40b1206a0d923e73aa54f0a6bd61419a96b914f1cd19900b6c8226899d9742ad"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3aa1472bf44f61dd27987cd051f1c893b7d3b17238bff8c23fceaef4f1133868"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:56a7e2bb639df9263bf6418231bc2a92a773f57886d371ddb7a869a24919face"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccca778c0737a773a1ad86b68bda52a71ad5950b25e120b6eb1330f0df54c3d0"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6c3e9350f9fb16de5b5e5fbf17b578811a52d71bb784cc5ff71acb7de2a7f9"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:564e9f9e4e6466273dbfab0e0a2e5fe819eec480c57b53a2cdee8e4fdae3ad5f"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:af66001d7b76a3fab0d5e4c1ec9339ac45748bc4a399cbc2baa48c1980d3c1f4"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-win32.whl", hash = "sha256:9e55dff5ec115316dd7a083cdc1a52de63693695aecf72bc53a8e1468ce429e5"}, - {file = "SQLAlchemy-2.0.22-cp38-cp38-win_amd64.whl", hash = "sha256:4e869a8ff7ee7a833b74868a0887e8462445ec462432d8cbeff5e85f475186da"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9886a72c8e6371280cb247c5d32c9c8fa141dc560124348762db8a8b236f8692"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a571bc8ac092a3175a1d994794a8e7a1f2f651e7c744de24a19b4f740fe95034"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8db5ba8b7da759b727faebc4289a9e6a51edadc7fc32207a30f7c6203a181592"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b0b3f2686c3f162123adba3cb8b626ed7e9b8433ab528e36ed270b4f70d1cdb"}, - {file = 
"SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0c1fea8c0abcb070ffe15311853abfda4e55bf7dc1d4889497b3403629f3bf00"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4bb062784f37b2d75fd9b074c8ec360ad5df71f933f927e9e95c50eb8e05323c"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-win32.whl", hash = "sha256:58a3aba1bfb32ae7af68da3f277ed91d9f57620cf7ce651db96636790a78b736"}, - {file = "SQLAlchemy-2.0.22-cp39-cp39-win_amd64.whl", hash = "sha256:92e512a6af769e4725fa5b25981ba790335d42c5977e94ded07db7d641490a85"}, - {file = "SQLAlchemy-2.0.22-py3-none-any.whl", hash = "sha256:3076740335e4aaadd7deb3fe6dcb96b3015f1613bd190a4e1634e1b99b02ec86"}, - {file = "SQLAlchemy-2.0.22.tar.gz", hash = "sha256:5434cc601aa17570d79e5377f5fd45ff92f9379e2abed0be5e8c2fba8d353d2b"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = 
"sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, + {file = 
"SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, + {file = 
"SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, + {file = 
"SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, + {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, + {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, ] [package.dependencies] @@ -3310,6 +3332,7 @@ typing-extensions = ">=4.2.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] @@ -3320,7 +3343,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)"] +oracle = ["cx-oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -3406,14 +3429,14 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.1" +version = "0.12.2" description = "Style preserving TOML library" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, + {file = "tomlkit-0.12.2-py3-none-any.whl", hash = "sha256:eeea7ac7563faeab0a1ed8fe12c2e5a51c61f933f2502f7e9db0241a65163ad0"}, + {file = "tomlkit-0.12.2.tar.gz", hash = "sha256:df32fab589a81f0d7dc525a4267b6d7a64ee99619cbd1eeb0fae32c1dd426977"}, ] [[package]] @@ -3458,32 +3481,6 @@ name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" category = "dev" -<<<<<<< HEAD -======= 
-optional = false -python-versions = "*" -files = [ - {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, - {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, -] - -[[package]] -name = "types-python-dateutil" -version = "2.8.19.14" -description = "Typing stubs for python-dateutil" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, - {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, -] - -[[package]] -name = "types-requests" -version = "2.31.0.2" -description = "Typing stubs for requests" ->>>>>>> fb3fb5cace0ebc1c41c9914afad862ec4a889831 optional = false python-versions = "*" files = [ @@ -3821,13 +3818,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -<<<<<<< HEAD python-versions = ">=3.10.12,<3.13" -content-hash = "fb9ccdfdb1d36a84cc2af2f9540de70bb14027c6d3e6b628c2c123092543413c" -======= -python-versions = "^3.8.16" -content-hash = "e3e1b2d0645e5cd7ad0281091d65e85b411eab0ddd7c475762e908bf9c10bdb4" -<<<<<<< HEAD ->>>>>>> fb3fb5cace0ebc1c41c9914afad862ec4a889831 -======= ->>>>>>> de7b910e50a8efb96b387cf59eae8aa573d0c9e6 +content-hash = "93ee3c0f5e7b66ce63f0b9a6c6d883aa8d4be5729f76b89b61f16dd9525bdf58" diff --git a/pyproject.toml b/pyproject.toml index cbb04e42..231652a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.10", ] From 
76b4fd270db295e5fcb68fc0f3d0ac01916745a0 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 20:24:43 -0700 Subject: [PATCH 343/505] =?UTF-8?q?=E2=9C=85=20test:=20update=20configs=20?= =?UTF-8?q?for=20cache?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/test.yml | 12 +++++++++++- apis/authentication.py | 8 ++++---- app.py | 11 ++++++----- config.py | 16 +++++++++------- pytest_config.py | 8 ++++++++ tests/conftest.py | 4 ++-- 6 files changed, 40 insertions(+), 19 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3d0b6984..d01e5f21 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,13 +13,23 @@ jobs: # These are simulated secrets for test workflow only. FAIRHUB_DATABASE_URL: postgresql://admin:root@localhost:5432/fairhub_local FAIRHUB_SECRET: mXrkOHXXQoMAhCOTZOV93QlncmeTwEZFPxTP1TXGiOFabE0KmuZgHWvTOLgjbv3S + FAIRHUB_CACHE_DEFAULT_TIMEOUT: 86400 + FAIRHUB_CACHE_KEY_PREFIX: fairhub-io# + FAIRHUB_CACHE_HOST: localhost + FAIRHUB_CACHE_PORT: 6379 + FAIRHUB_CACHE_DB: fairhub + FAIRHUB_CACHE_URL: redis://127.0.0.1:6379 + FAIRHUB_CACHE_TYPE: RedisCache steps: - uses: actions/checkout@v2 - - name: Crate a database for tests with docker + - name: Create a database for tests with docker run: docker run --name postgres -p 5432:5432 -e POSTGRES_USER=admin -e POSTGRES_PASSWORD=root -e POSTGRES_DB=fairhub_local -d postgres:latest + - name: Create a redis instance for tests with docker + run: docker run --name cache -p 6379:6379 -e CACHE_DB=fairhub CACHE_HOST=localhost CACHE_PORT=6379 CACHE_URL=redis://127.0.0.1:6379 CACHE_KEY_PREFIX=fairhub-io# CACHE_TIMEOUT=86400 -d redis:7.2-alpine + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: diff --git a/apis/authentication.py b/apis/authentication.py index c5d01d98..69379f10 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -22,7 
+22,7 @@ signup_model = api.model( "Signup", { - "email_address": fields.String(required=True, default="sample@gmail.com"), + "email_address": fields.String(required=True, default=""), "password": fields.String(required=True, default=""), "code": fields.String(required=True, default=""), }, @@ -58,6 +58,7 @@ def post(self): if os.environ.get("FLASK_ENV") != "testing": bypassed_emails = [ "test@fairhub.io", + "ymir@fairhub.io", "bpatel@fairhub.io", "sanjay@fairhub.io", "aydan@fairhub.io", @@ -116,8 +117,7 @@ def validate_password(instance): # Schema validation schema = { "type": "object", - # "required": ["email_address", "password", "code"], - "required": ["email_address", "password"], + "required": ["email_address", "password", "code"], "additionalProperties": False, "properties": { "email_address": {"type": "string", "format": "valid_email"}, @@ -125,7 +125,7 @@ def validate_password(instance): "type": "string", "format": "password", }, - # "code": {"type": "string"}, + "code": {"type": "string"}, }, } diff --git a/app.py b/app.py index c1f15521..3590b35f 100644 --- a/app.py +++ b/app.py @@ -42,7 +42,9 @@ def create_app(config_module=None): # csrf = CSRFProtect() # csrf.init_app(app) + # All configuration variables that start with FAIRHUB_ will be loaded app.config.from_prefixed_env("FAIRHUB") + if config.FAIRHUB_SECRET: if len(config.FAIRHUB_SECRET) < 32: raise RuntimeError("FAIRHUB_SECRET must be at least 32 characters long") @@ -50,15 +52,14 @@ def create_app(config_module=None): raise RuntimeError("FAIRHUB_SECRET not set") if "DATABASE_URL" in app.config: - # if "TESTING" in app_config and app_config["TESTING"]: - # pass - # else: - # print("DATABASE_URL: ", app.config["DATABASE_URL"]) app.config["SQLALCHEMY_DATABASE_URI"] = app.config["DATABASE_URL"] else: - # throw error raise RuntimeError("FAIRHUB_DATABASE_URL not set") + # Testing for only one of the 5 required cache variables + if "CACHE_URL" not in app.config: + raise RuntimeError("FAIRHUB_CACHE_URL not 
set") + cache_config = { key: value for key, value in app.config.items() diff --git a/config.py b/config.py index 16e9416d..57cea136 100644 --- a/config.py +++ b/config.py @@ -18,12 +18,14 @@ def get_env(key): FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") + FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") -CACHE_DEFAULT_TIMEOUT = get_env("CACHE_DEFAULT_TIMEOUT") -CACHE_KEY_PREFIX = get_env("CACHE_KEY_PREFIX") -CACHE_HOST = get_env("CACHE_HOST") -CACHE_PORT = get_env("CACHE_PORT") -CACHE_DB = get_env("CACHE_DB") -CACHE_URL = get_env("CACHE_URL") -CACHE_TYPE = get_env("CACHE_TYPE") + +FAIRHUB_CACHE_DEFAULT_TIMEOUT = get_env("FAIRHUB_CACHE_DEFAULT_TIMEOUT") +FAIRHUB_CACHE_KEY_PREFIX = get_env("FAIRHUB_CACHE_KEY_PREFIX") +FAIRHUB_CACHE_HOST = get_env("FAIRHUB_CACHE_HOST") +FAIRHUB_CACHE_PORT = get_env("FAIRHUB_CACHE_PORT") +FAIRHUB_CACHE_DB = get_env("FAIRHUB_CACHE_DB") +FAIRHUB_CACHE_URL = get_env("FAIRHUB_CACHE_URL") +FAIRHUB_CACHE_TYPE = get_env("FAIRHUB_CACHE_TYPE") diff --git a/pytest_config.py b/pytest_config.py index 9336ba61..1abce106 100644 --- a/pytest_config.py +++ b/pytest_config.py @@ -20,4 +20,12 @@ class TestConfig: FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") + FAIRHUB_CACHE_DEFAULT_TIMEOUT = get_env("FAIRHUB_CACHE_DEFAULT_TIMEOUT") + FAIRHUB_CACHE_KEY_PREFIX = get_env("FAIRHUB_CACHE_KEY_PREFIX") + FAIRHUB_CACHE_HOST = get_env("FAIRHUB_CACHE_HOST") + FAIRHUB_CACHE_PORT = get_env("FAIRHUB_CACHE_PORT") + FAIRHUB_CACHE_DB = get_env("FAIRHUB_CACHE_DB") + FAIRHUB_CACHE_URL = get_env("FAIRHUB_CACHE_URL") + FAIRHUB_CACHE_TYPE = get_env("FAIRHUB_CACHE_TYPE") + TESTING = True diff --git a/tests/conftest.py b/tests/conftest.py index 32c2f8e7..37c2d811 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -64,7 +64,7 @@ def _create_user(_test_client): 
response = _test_client.post( "/auth/signup", json={ - "email_address": "sample@gmail.com", + "email_address": "ymir@fairhub.io", "password": "Testingyeshello11!", "code": "7654321", }, @@ -81,7 +81,7 @@ def _login_user(_test_client): response = _test_client.post( "/auth/login", json={ - "email_address": "sample@gmail.com", + "email_address": "ymir@fairhub.io", "password": "Testingyeshello11!", }, ) From 287aee34e9916b6259e9c244eef0b32463f072df Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 20:27:04 -0700 Subject: [PATCH 344/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20update=20ci=20test?= =?UTF-8?q?=20containers?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index d01e5f21..ef1013d6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,7 +28,7 @@ jobs: run: docker run --name postgres -p 5432:5432 -e POSTGRES_USER=admin -e POSTGRES_PASSWORD=root -e POSTGRES_DB=fairhub_local -d postgres:latest - name: Create a redis instance for tests with docker - run: docker run --name cache -p 6379:6379 -e CACHE_DB=fairhub CACHE_HOST=localhost CACHE_PORT=6379 CACHE_URL=redis://127.0.0.1:6379 CACHE_KEY_PREFIX=fairhub-io# CACHE_TIMEOUT=86400 -d redis:7.2-alpine + run: docker run --name cache -p 6379:6379 -e CACHE_DB=fairhub -e CACHE_HOST=localhost -e CACHE_PORT=6379 -e CACHE_URL=redis://127.0.0.1:6379 -e CACHE_KEY_PREFIX=fairhub-io# -e CACHE_TIMEOUT=86400 -d redis:7.2-alpine - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 From 5f433c46b9ae57c2d92da2d9551af3082f483add Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 3 Nov 2023 20:30:38 -0700 Subject: [PATCH 345/505] =?UTF-8?q?=F0=9F=91=B7=20chore:=20update=20dev=20?= =?UTF-8?q?compose?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- dev-docker-compose.yaml | 53 ++++++++++------------------------------- 1 file changed, 12 insertions(+), 41 deletions(-) diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 6671b6cb..6f4c718f 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -1,61 +1,32 @@ version: '3' services: - flask-api: - build: - context: . - dockerfile: Dockerfile - ports: - - 5000:5000 - # volumes: - # - ./apis:/app/apis - # - ./model:/app/model - # - ./core:/app/core - # - ./app.py:/app/ - # - ./config.py:/app/ - environment: - FLASK_ENV: development - FLASK_DEBUG: 1 - FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/postgres}" - secret: "aaldkljla;jsdjklajlkkljdkljakjl;d;" - depends_on: - database: - condition: service_healthy - database: + postgres: image: postgres:latest + restart: always environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - PGUSER: postgres + POSTGRES_USER: admin + POSTGRES_PASSWORD: root + POSTGRES_DB: fairhub_local ports: - 5432:5432 - # restart: always - healthcheck: - test: pg_isready - interval: 10s - timeout: 5s - retries: 5 - # volumes: - # - ./postgres-data:/var/lib/postgresql/data - # - ./sql/init_timezones.sql:/docker-entrypoint-initdb.d/1-schema.sql + volumes: + - ./postgres-data:/var/lib/postgresql/data cache: image: redis:7.2-alpine - # restart: always + restart: always environment: CACHE_DB: fairhub CACHE_HOST: localhost CACHE_PORT: 6379 CACHE_URL: redis://127.0.0.1:6379 - CACHE_PREFIX: fairhub-io# + CACHE_KEY_PREFIX: fairhub-io# CACHE_TIMEOUT: 86400 - CACHE_PASSWORD: development ports: - '6379:6379' command: redis-server --save 20 1 --loglevel warning volumes: - cache:/data - # volumes: - # cache: - # driver: local - +volumes: + cache: + driver: local From 93514a2adde19684f762943c685c4a21ed8b3033 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Sun, 5 Nov 2023 21:08:09 -0800 Subject: [PATCH 346/505] 
=?UTF-8?q?=E2=9C=A8=20feat:=20dashboard=20crud=20?= =?UTF-8?q?api?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 3 + apis/authentication.py | 8 +- apis/dashboard.py | 252 ++++++++++++++++ apis/redcap.py | 282 ++++++------------ apis/redcap_data/__init__.py | 18 +- apis/redcap_data/redcap_project_data.py | 19 +- .../redcap_report_participant_values_data.py | 16 +- .../redcap_report_participants_data.py | 13 +- .../redcap_report_repeat_surveys_data.py | 13 +- .../redcap_report_survey_completions_data.py | 13 +- app.py | 17 +- dev-docker-compose.yaml | 53 +++- model/study_redcap_project_api.py | 51 ++-- model/study_redcap_project_dashboard.py | 58 ++-- tests/conftest.py | 4 +- 15 files changed, 507 insertions(+), 313 deletions(-) create mode 100644 apis/dashboard.py diff --git a/apis/__init__.py b/apis/__init__.py index ec3535f2..7881ac5d 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -7,6 +7,7 @@ from .authentication import api as authentication from .contributor import api as contributors_api +from .dashboard import api as dashboard from .dataset import api as dataset_api from .dataset_metadata.dataset_access import api as access from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier @@ -107,6 +108,7 @@ "study_description", "dataset_contributor", "redcap", + "dashboard", "redcap_project_data", "redcap_report_participants_data", "redcap_report_participants_values_data", @@ -140,3 +142,4 @@ def get(self): api.add_namespace(contributors_api) api.add_namespace(user) api.add_namespace(redcap) +api.add_namespace(dashboard) diff --git a/apis/authentication.py b/apis/authentication.py index 69379f10..c5d01d98 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -22,7 +22,7 @@ signup_model = api.model( "Signup", { - "email_address": fields.String(required=True, default=""), + "email_address": fields.String(required=True, default="sample@gmail.com"), 
"password": fields.String(required=True, default=""), "code": fields.String(required=True, default=""), }, @@ -58,7 +58,6 @@ def post(self): if os.environ.get("FLASK_ENV") != "testing": bypassed_emails = [ "test@fairhub.io", - "ymir@fairhub.io", "bpatel@fairhub.io", "sanjay@fairhub.io", "aydan@fairhub.io", @@ -117,7 +116,8 @@ def validate_password(instance): # Schema validation schema = { "type": "object", - "required": ["email_address", "password", "code"], + # "required": ["email_address", "password", "code"], + "required": ["email_address", "password"], "additionalProperties": False, "properties": { "email_address": {"type": "string", "format": "valid_email"}, @@ -125,7 +125,7 @@ def validate_password(instance): "type": "string", "format": "password", }, - "code": {"type": "string"}, + # "code": {"type": "string"}, }, } diff --git a/apis/dashboard.py b/apis/dashboard.py new file mode 100644 index 00000000..19dd052e --- /dev/null +++ b/apis/dashboard.py @@ -0,0 +1,252 @@ +"""API routes for study redcap""" +from typing import Any, Union + +from flask import request +from flask_restx import Namespace, Resource, fields, reqparse +from jsonschema import ValidationError, validate + +import model + +from .authentication import is_granted + +api = Namespace("Dashboard", description="Dashboard operations", path="/") + +redcap_project_dashboard_model = api.model( + "RedcapProjectDashboard", + { + "study_id": fields.String(required=True, description="Study ID"), + "project_id": fields.String( + required=True, description="REDCap project ID (pid)" + ), + "dashboard_id": fields.String( + required=True, readonly=True, description="REDCap dashboard ID" + ), + "dashboard_name": fields.String( + required=True, readonly=True, description="REDCap dashboard name" + ), + "dashboard_modules": fields.String( + required=True, readonly=True, description="REDCap dashboard name" + ), + "report_ids": fields.String( + required=True, readonly=True, description="REDCap project report IDs" + 
), + }, +) + +dashboard_parser = reqparse.RequestParser() +dashboard_parser.add_argument("dashboard_id", type=str, help="Dashboard ID") + + +@api.route("/study//dashboard/all") +class RedcapProjectDashboards(Resource): + @api.doc("redcap_project_dashboards") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model, as_list=True) + def get(self, study_id: int): + """Get all study REDCap project dashboard""" + study = model.db.session.query(model.Study).get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( + study=study + ) + redcap_project_dashboards = [ + redcap_project_dashboard.to_dict() + for redcap_project_dashboard in redcap_project_dashboards + ] + return redcap_project_dashboards, 201 + + +@api.route("/study//dashboard/connect") +class ConnectRedcapProjectDashboard(Resource): + @api.doc(parser=dashboard_parser) + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def post(self, study_id: int): + """Create study REDCap project dashboard""" + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not modify", 403 + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "project_id", + "dashboard_name", + "dashboard_modules", + ], + "properties": { + "project_id": {"type": "string", "minLength": 1}, + "dashboard_name": {"type": "string", "minLength": 1}, + "dashboard_modules": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "properties": { + "key": {"type": "string", "minLength": 1}, + "name": {"type": "string", "minLength": 1}, + "selected": {"type": "boolean"}, + "reportId": {"type": "string", "minLength": 0}, + }, + } + ] + }, + "minItems": 1, + }, + }, + } + 
data: Union[Any, dict] = request.json + try: + validate(request.json, schema) + except ValidationError as e: + print(e) + return e.message, 400 + print("moduleslength", len(data["dashboard_modules"])) + + if len(data["project_id"]) < 1: + return ( + f"redcap project_id is required to connect a dashboard: {data['project_id']}", + 400, + ) + if len(data["dashboard_name"]) < 1: + return ( + f"dashboard dashboard_name is required to connect a dashboard: {data['dashboard_name']}", + 400, + ) + if len(data["dashboard_modules"]) < 1: + return ( + f"dashboard dashboard_modules is required to connect a dashboard: {data['dashboard_name']}", + 400, + ) + data["dashboard_modules"] = [ + dashboard_module + for dashboard_module in data["dashboard_modules"] + if dashboard_module["selected"] + ] + connect_redcap_project_dashboard = model.StudyRedcapProjectDashboard.from_data( + study, data + ) + model.db.session.add(connect_redcap_project_dashboard) + model.db.session.commit() + connect_redcap_project_dashboard = connect_redcap_project_dashboard.to_dict() + return connect_redcap_project_dashboard, 201 + + +@api.route("/study//dashboard") +class RedcapProjectDashboard(Resource): + """Get study REDCap project dashboard""" + + @api.doc(parser=dashboard_parser) + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def get(self, study_id: int): + """Get Study Redcap Project Dashboard""" + study = model.db.session.query(model.Study).get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not get this dashboard", 403 + dashboard_id = dashboard_parser.parse_args()["dashboard_id"] + redcap_project_dashboard = model.db.session.query( + model.StudyRedcapProjectDashboard + ).get(dashboard_id) + redcap_project_dashboard = redcap_project_dashboard.to_dict() + return redcap_project_dashboard, 201 + + @api.doc("redcap_project_dashboard") + @api.response(200, "Success") + @api.response(400, 
"Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def put(self, study_id: int): + """Update study REDCap project dashboard""" + study = model.db.session.query(model.Study).get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not modify this dashboard", 403 + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "project_id", + "dashboard_name", + "dashboard_modules", + ], + "properties": { + "project_id": {"type": "string", "minLength": 1}, + "dashboard_name": {"type": "string", "minLength": 1}, + "dashboard_modules": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "properties": { + "key": {"type": "string", "minLength": 1}, + "name": {"type": "string", "minLength": 1}, + "selected": {"type": "boolean"}, + "reportId": {"type": "string", "minLength": 0}, + }, + } + ] + }, + "minItems": 1, + }, + }, + } + data: Union[Any, dict] = request.json + try: + validate(request.json, schema) + except ValidationError as e: + print(e) + return e.message, 400 + print("moduleslength", len(data["dashboard_modules"])) + + if len(data["project_id"]) < 1: + return ( + f"redcap project_id is required to connect a dashboard: {data['project_id']}", + 400, + ) + if len(data["dashboard_name"]) < 1: + return ( + f"dashboard dashboard_name is required to connect a dashboard: {data['dashboard_name']}", + 400, + ) + if len(data["dashboard_modules"]) < 1: + return ( + f"dashboard dashboard_modules is required to connect a dashboard: {data['dashboard_name']}", + 400, + ) + data["dashboard_modules"] = [ + dashboard_module + for dashboard_module in data["dashboard_modules"] + if dashboard_module["selected"] + ] + update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( + data["dashboard_id"] + ) + update_redcap_project_dashboard.update(data) + model.db.session.commit() + update_redcap_project_dashboard = 
update_redcap_project_dashboard.to_dict() + return update_redcap_project_dashboard, 201 + + +@api.route("/study//dashboard/delete") +class DeleteRedcapProjectDashboard(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def post(self, study_id: int): + """Delete study REDCap project dashboard""" + study = model.Study.query.get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not delete this redcap project", 403 + data: Union[Any, dict] = request.json + model.StudyRedcapProjectDashboard.query.filter_by( + dashboard_id=data["dashboard_id"] + ).delete() + model.db.session.commit() + return 204 diff --git a/apis/redcap.py b/apis/redcap.py index 0a52a4ad..3712992a 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -9,108 +9,85 @@ from .authentication import is_granted -api = Namespace("Redcap", description="Redcap operations", path="/") +api = Namespace("Redcap", description="REDCap operations", path="/") redcap_project_view_model = api.model( "RedcapProjectAPI", { - "study_id": fields.String(required=True), - "project_title": fields.String(required=True), - "project_id": fields.String(required=True), - "project_api_url": fields.String(required=True), - "project_api_active": fields.Boolean(required=True), + "study_id": fields.String(required=True, description="Study ID"), + "project_id": fields.String( + required=True, description="REDCap project ID (pid)" + ), + "project_title": fields.String( + required=True, description="REDCap project title" + ), + "project_api_url": fields.String( + required=True, description="REDCap project API url" + ), + "project_api_active": fields.Boolean( + required=True, description="REDCap project is active" + ), }, ) redcap_project_api_model = api.model( "RedcapProjectAPI", { - "study_id": fields.String(required=True), - "project_title": fields.String(required=True), - "project_id": fields.String(required=True), - 
"project_api_key": fields.String(required=True), - "project_api_url": fields.String(required=True), - "project_api_active": fields.Boolean(required=True), - }, -) - -redcap_project_dashboard_model = api.model( - "RedcapProjectDashboard", - { - "project_id": fields.String(required=True), - "dashboard_id": fields.String( - required=True, readonly=True, description="REDCap dashboard ID" + "study_id": fields.String(required=True, description="Study ID"), + "project_id": fields.String( + required=True, description="REDCap project ID (pid)" + ), + "project_title": fields.String( + required=True, description="REDCap project title" ), - "dashboard_name": fields.String( - required=True, readonly=True, description="REDCap dashboard name" + "project_api_key": fields.String( + required=True, description="REDCap project API key" ), - "dashboard_modules": fields.String( - required=True, readonly=True, description="REDCap dashboard name" + "project_api_url": fields.String( + required=True, description="REDCap project API url" ), - "report_ids": fields.String( - required=True, readonly=True, description="REDCap project report IDs" + "project_api_active": fields.Boolean( + required=True, description="REDCap project is active" ), }, ) +project_parser = reqparse.RequestParser() +project_parser.add_argument("project_id", type=str, help="REDCap project ID (pid)") + @api.route("/study//redcap/all") class RedcapProjectAPIs(Resource): - """Study Redcap Metadata""" - @api.doc("redcap_project_apis") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model, as_list=True) def get(self, study_id: int): - """List all study REDCap project API links""" + """Get all study REDCap project API links""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): - return "Access denied, you can not modify", 403 - # redcap_project_apis = model.StudyRedcapProjectApi.query.all(study) + return ( + "Access denied, you can not view 
the redcap projects for this study", + 403, + ) redcap_project_views = model.StudyRedcapProjectApi.query.filter_by(study=study) - return [ + redcap_project_views = [ redcap_project_view.to_dict() for redcap_project_view in redcap_project_views ] + return redcap_project_views, 201 -project_parser = reqparse.RequestParser().add_argument( - "project_id", type=str, help="REDCap project ID (pid)" -) - - -@api.route("/study//redcap") -class RedcapProjectAPI(Resource): - """Study Redcap Metadata""" - - @api.doc(parser=project_parser) - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_view_model) - def get(self, study_id: int): - """Get study REDCap project API link""" - study = model.db.session.query(model.Study).get(study_id) - if is_granted("redcap_access", study): - return "Access denied, you can not modify", 403 - project_id = project_parser.parse_args()["project_id"] - redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get( - project_id - ) - - if redcap_project_view is None: - return {"error": "Project not found"}, 404 - - return redcap_project_view.to_dict(), 201 - +@api.route("/study//redcap/add") +class AddRedcapProjectAPI(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) - def put(self, study_id: int): - """Update study REDCap project API link""" + def post(self, study_id: int): + """Create REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not create a redcap project", 403 # Schema validation data: Union[Any, dict] = request.json schema = { @@ -120,15 +97,18 @@ def put(self, study_id: int): "project_title", "project_id", "project_api_url", + "project_api_key", "project_api_active", ], "properties": { "project_title": {"type": "string", "minLength": 1}, - "project_id": 
{"type": "string", "minLength": 1, "maxLength": 12}, + "project_id": {"type": "string", "minLength": 5}, "project_api_url": {"type": "string", "minLength": 1}, + "project_api_key": {"type": "string", "minLength": 32}, "project_api_active": {"type": "boolean"}, }, } + try: validate(request.json, schema) except ValidationError as e: @@ -136,46 +116,63 @@ def put(self, study_id: int): if len(data["project_title"]) < 1: return ( - f"redcap project_title is required for redcap access: {data['project_title']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_title is required for redcap access: {data['project_title']}", 400, ) if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap access: {data['project_id']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_id is required for redcap access: {data['project_id']}", 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: {data['project_api_url']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_api_url is required for redcap access: {data['project_api_url']}", 400, ) - - if isinstance(data["project_api_active"], bool) is False: + if len(data["project_api_key"]) < 1: + return ( + f"redcap project_api_key is required for redcap access: {data['project_api_key']}", + 400, + ) + if type(data["project_api_active"]) is not bool: return ( - f"redcap project_api_active is required for redcap access: {data['project_api_active']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_api_active is required for redcap access: {data['project_api_active']}", 400, ) - update_redcap_project_view = model.StudyRedcapProjectApi.query.get( - data["project_id"] - ) - update_redcap_project_view.update(data) + add_redcap_project_api = model.StudyRedcapProjectApi.from_data(study, data) + model.db.session.add(add_redcap_project_api) model.db.session.commit() - return update_redcap_project_view, 201 + 
add_redcap_project_api = add_redcap_project_api.to_dict() + return add_redcap_project_api, 201 -@api.route("/study//redcap/add") -class AddRedcapProjectAPI(Resource): - """Study Redcap Metadata""" +@api.route("/study//redcap") +class RedcapProjectAPI(Resource): + @api.doc(parser=project_parser) + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_view_model) + def get(self, study_id: int): + """Get study REDCap project API link""" + study = model.db.session.query(model.Study).get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not get this redcap project", 403 + project_id = project_parser.parse_args()["project_id"] + redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get( + project_id + ) + redcap_project_view = redcap_project_view.to_dict() + return redcap_project_view, 201 @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) - def post(self, study_id: int): - """Create new study REDCap project API link""" + def put(self, study_id: int): + """Update study REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not modify this redcap project", 403 # Schema validation data: Union[Any, dict] = request.json schema = { @@ -185,18 +182,15 @@ def post(self, study_id: int): "project_title", "project_id", "project_api_url", - "project_api_key", "project_api_active", ], "properties": { "project_title": {"type": "string", "minLength": 1}, - "project_id": {"type": "string", "minLength": 5}, + "project_id": {"type": "string", "minLength": 1, "maxLength": 12}, "project_api_url": {"type": "string", "minLength": 1}, - "project_api_key": {"type": "string", "minLength": 32}, "project_api_active": {"type": "boolean"}, }, } - try: validate(request.json, schema) except 
ValidationError as e: @@ -204,40 +198,36 @@ def post(self, study_id: int): if len(data["project_title"]) < 1: return ( - f"redcap project_title is required for redcap access: {data['project_title']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_title is required for redcap access: {data['project_title']}", 400, ) if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap access: {data['project_id']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_id is required for redcap access: {data['project_id']}", 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: {data['project_api_url']}", # noqa E501 # pylint: disable=line-too-long - 400, - ) - if len(data["project_api_key"]) < 1: - return ( - f"redcap project_api_key is required for redcap access: {data['project_api_key']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_api_url is required for redcap access: {data['project_api_url']}", 400, ) - if isinstance(data["project_api_active"], bool) is False: + if type(data["project_api_active"]) is not bool: return ( - f"redcap project_api_active is required for redcap access: {data['project_api_active']}", # noqa E501 # pylint: disable=line-too-long + f"redcap project_api_active is required for redcap access: {data['project_api_active']}", 400, ) - add_redcap_project_api = model.StudyRedcapProjectApi.from_data(study, data) - model.db.session.add(add_redcap_project_api) + update_redcap_project_view = model.StudyRedcapProjectApi.query.get( + data["project_id"] + ) + update_redcap_project_view.update(data) model.db.session.commit() - return add_redcap_project_api, 201 + update_redcap_project_view = update_redcap_project_view.to_dict() + return update_redcap_project_view, 201 @api.route("/study//redcap/delete") class DeleteRedcapProjectAPI(Resource): - """Study Redcap Metadata""" - @api.response(200, "Success") @api.response(400, "Validation 
Error") @api.marshal_with(redcap_project_view_model) @@ -245,96 +235,10 @@ def post(self, study_id: int): """Delete study REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not delete this redcap project", 403 data: Union[Any, dict] = request.json - delete_redcap_project_api = model.StudyRedcapProjectApi.query.filter_by( + model.StudyRedcapProjectApi.query.filter_by( project_id=data["project_id"] ).delete() model.db.session.commit() - return delete_redcap_project_api, 204 - - -@api.route("/study//redcap/dashboards") -class RedcapProjectDashboards(Resource): - """Study Redcap Metadata""" - - @api.doc("redcap_project_dashboards") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_dashboard_model, as_list=True) - def get(self, study_id: int): - """Get study redcap""" - study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): - return "Access denied, you can not modify", 403 - # redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.all(study) - redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( - study=study - ) - return [ - redcap_project_dashboard.to_dict() - for redcap_project_dashboard in redcap_project_dashboards - ] - - -@api.route("/study//redcap/dashboard") -class RedcapProjectDashboard(Resource): - """Study Redcap Metadata""" - - @api.doc("redcap_project_dashboard") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_dashboard_model) - def get(self, study_id: int, project_id: str, dashboard_id: str): - """Get study redcap""" - study = model.Study.query.get(study_id) - study_redcap_project_api = model.StudyRedcapProjectApi.query.get(project_id) - study_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( - dashboard_id - ) - 
if is_granted("redcap_access", study): - return "Access denied, you can not modify", 403 - redcap_project_dashboards = model.StudyRedcapProjectDashboard.query.filter_by( - study=study, - study_redcap_project_api=study_redcap_project_api, - study_redcap_project_dashboard=study_redcap_project_dashboard, - ) - return [ - redcap_project_dashboard.to_dict() - for redcap_project_dashboard in redcap_project_dashboards - ] - - @api.doc("redcap_project_dashboard") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_dashboard_model) - def put(self, study_id: int): - """Update study redcap""" - study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): - return "Access denied, you can not modify", 403 - data: Union[Any, dict] = request.json - update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( - data["project_id"] - ) - update_redcap_project_dashboard.update(data) - model.db.session.commit() - return update_redcap_project_dashboard.to_dict() - - @api.doc("redcap_project_dashboard") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_dashboard_model) - def delete(self, study_id: int): - """Delete study redcap metadata""" - study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): - return "Access denied, you can not delete study", 403 - data: Union[Any, dict] = request.json - redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( - data["project_id"] - ) - model.db.session.delete(redcap_project_dashboard) - model.db.session.commit() - return 204 diff --git a/apis/redcap_data/__init__.py b/apis/redcap_data/__init__.py index 8b85fd76..4f12cfc0 100644 --- a/apis/redcap_data/__init__.py +++ b/apis/redcap_data/__init__.py @@ -1,9 +1,9 @@ -# from .redcap_project_data import RedcapProjectDataResource -# from .redcap_report_participant_values_data import ( -# 
RedcapReportParticipantValuesDataResource, -# ) -# from .redcap_report_participants_data import RedcapReportParticipantsDataResource -# from .redcap_report_repeat_surveys_data import RedcapReportRepeatSurveysDataResource -# from .redcap_report_survey_completions_data import ( -# RedcapReportSurveyCompletionsDataResource, -# ) +from .redcap_project_data import RedcapProjectDataResource +from .redcap_report_participant_values_data import ( + RedcapReportParticipantValuesDataResource, +) +from .redcap_report_participants_data import RedcapReportParticipantsDataResource +from .redcap_report_repeat_surveys_data import RedcapReportRepeatSurveysDataResource +from .redcap_report_survey_completions_data import ( + RedcapReportSurveyCompletionsDataResource, +) diff --git a/apis/redcap_data/redcap_project_data.py b/apis/redcap_data/redcap_project_data.py index b49e2d51..f2cec382 100644 --- a/apis/redcap_data/redcap_project_data.py +++ b/apis/redcap_data/redcap_project_data.py @@ -1,15 +1,14 @@ """API routes for redcap project""" -# import typing +import typing -# from flask import request +from flask import request from flask_restx import Resource, fields - -# from jsonschema import ValidationError, validate +from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -# from ..authentication import is_granted +from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig @@ -84,15 +83,12 @@ @api.route("/study//redcap//project") class RedcapProjectDataResource(Resource): - """RedcapProjectDataResource""" - @api.doc("project") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_data) - def get( - self, study_id: int, redcap_project_id: str - ): # pylint: disable=unused-argument + # @cache.cached() + def get(self, study_id: int, redcap_project_id: str): """ Get REDCap project @@ -105,4 +101,5 @@ def get( 
PyCapProject = Project( study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] ) - return PyCapProject.export_project_info() + project = PyCapProject.export_project_info() + return project diff --git a/apis/redcap_data/redcap_report_participant_values_data.py b/apis/redcap_data/redcap_report_participant_values_data.py index 7240e5e9..a69cca2e 100644 --- a/apis/redcap_data/redcap_report_participant_values_data.py +++ b/apis/redcap_data/redcap_report_participant_values_data.py @@ -1,15 +1,14 @@ """API routes for redcap report participant values data""" -# import typing +import typing -# from flask import request +from flask import request from flask_restx import Resource, fields - -# from jsonschema import ValidationError, validate +from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -# from ..authentication import is_granted +from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig @@ -38,6 +37,7 @@ "dm": fields.String( required=True, readonly=True, description="Data approved for Fairhub.io" ), + "siteid": fields.String(required=True, readonly=True, description="Site ID"), "genderid": fields.String( required=True, readonly=True, description="Gender identity" ), @@ -96,7 +96,7 @@ "dvrtnyn": fields.String( required=True, readonly=True, - description="Was the participant given device return instructions and shipping materials?", # noqa: E501 # pylint: disable=line-too-long + description="Was the participant given device return instructions and shipping materials?", ), "dvrtnship": fields.String( required=True, readonly=True, description="Return shipping tracking number" @@ -152,9 +152,7 @@ class RedcapReportParticipantValuesDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_participant_values_data) # @IN_MEMORY_CACHE.cached() - def get( - self, study_id: int, redcap_project_id: 
str - ): # pylint: disable=unused-argument + def get(self, study_id: int, redcap_project_id: str): study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/apis/redcap_data/redcap_report_participants_data.py b/apis/redcap_data/redcap_report_participants_data.py index 7f562bba..eed60162 100644 --- a/apis/redcap_data/redcap_report_participants_data.py +++ b/apis/redcap_data/redcap_report_participants_data.py @@ -1,15 +1,14 @@ """API routes for redcap report participants data data""" -# import typing +import typing -# from flask import request +from flask import request from flask_restx import Resource, fields - -# from jsonschema import ValidationError, validate +from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -# from ..authentication import is_granted +from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig @@ -54,9 +53,7 @@ class RedcapReportParticipantsDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_participants_data) # @IN_MEMORY_CACHE.cached() - def get( - self, study_id: int, redcap_project_id: str - ): # pylint: disable=unused-argument + def get(self, study_id: int, redcap_project_id: str): study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/apis/redcap_data/redcap_report_repeat_surveys_data.py b/apis/redcap_data/redcap_report_repeat_surveys_data.py index f25656bb..79bd9c29 100644 --- a/apis/redcap_data/redcap_report_repeat_surveys_data.py +++ b/apis/redcap_data/redcap_report_repeat_surveys_data.py @@ -1,15 +1,14 @@ """API routes for redcap report repeat surveys data""" -# import typing +import typing -# from flask import request +from flask import request from flask_restx import Resource, fields - -# from jsonschema import 
ValidationError, validate +from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -# from ..authentication import is_granted +from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig @@ -60,9 +59,7 @@ class RedcapReportRepeatSurveysDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_repeat_surveys_data) # @IN_MEMORY_CACHE.cached() - def get( - self, study_id: int, redcap_project_id: str - ): # pylint: disable=unused-argument + def get(self, study_id: int, redcap_project_id: str): study_ = model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/apis/redcap_data/redcap_report_survey_completions_data.py b/apis/redcap_data/redcap_report_survey_completions_data.py index e1a8fb2f..318ba229 100644 --- a/apis/redcap_data/redcap_report_survey_completions_data.py +++ b/apis/redcap_data/redcap_report_survey_completions_data.py @@ -1,15 +1,14 @@ """API routes for redcap report survey completions data""" -# import typing +import typing -# from flask import request +from flask import request from flask_restx import Resource, fields - -# from jsonschema import ValidationError, validate +from jsonschema import ValidationError, validate import model from apis.redcap_data_namespace import api -# from ..authentication import is_granted +from ..authentication import is_granted # # REDCap Data Visualization ETL Configuration # from modules.etl.config import redcapTransformConfig @@ -172,9 +171,7 @@ class RedcapReportSurveyCompletionsDataResource(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_report_survey_completions_data) # @IN_MEMORY_CACHE.cached() - def get( - self, study_id: int, redcap_project_id: str - ): # pylint: disable=unused-argument + def get(self, study_id: int, redcap_project_id: str): study_ = 
model.Study.query.get(study_id) study_redcap_ = study_.study_redcap.to_dict() PyCapProject = Project( diff --git a/app.py b/app.py index 3590b35f..ab5f277a 100644 --- a/app.py +++ b/app.py @@ -42,24 +42,21 @@ def create_app(config_module=None): # csrf = CSRFProtect() # csrf.init_app(app) - # All configuration variables that start with FAIRHUB_ will be loaded app.config.from_prefixed_env("FAIRHUB") - if config.FAIRHUB_SECRET: if len(config.FAIRHUB_SECRET) < 32: raise RuntimeError("FAIRHUB_SECRET must be at least 32 characters long") else: raise RuntimeError("FAIRHUB_SECRET not set") - - if "DATABASE_URL" in app.config: - app.config["SQLALCHEMY_DATABASE_URI"] = app.config["DATABASE_URL"] + if "FAIRHUB_DATABASE_URL" in app.config: + # if "TESTING" in app_config and app_config["TESTING"]: + # pass + # else: + # print("DATABASE_URL: ", app.config["DATABASE_URL"]) + app.config["SQLALCHEMY_DATABASE_URI"] = app.config["FAIRHUB_DATABASE_URL"] else: raise RuntimeError("FAIRHUB_DATABASE_URL not set") - # Testing for only one of the 5 required cache variables - if "CACHE_URL" not in app.config: - raise RuntimeError("FAIRHUB_CACHE_URL not set") - cache_config = { key: value for key, value in app.config.items() @@ -117,7 +114,7 @@ def create_schema(): table_names = [table.name for table in metadata.tables.values()] if len(table_names) == 0: with engine.begin(): - # Create the database schema + """Create the database schema.""" model.db.create_all() @app.before_request diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 6f4c718f..6671b6cb 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -1,32 +1,61 @@ version: '3' services: - postgres: + flask-api: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - 5000:5000 + # volumes: + # - ./apis:/app/apis + # - ./model:/app/model + # - ./core:/app/core + # - ./app.py:/app/ + # - ./config.py:/app/ + environment: + FLASK_ENV: development + FLASK_DEBUG: 1 + FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/postgres}" + secret: "aaldkljla;jsdjklajlkkljdkljakjl;d;" + depends_on: + database: + condition: service_healthy + database: image: postgres:latest - restart: always environment: - POSTGRES_USER: admin - POSTGRES_PASSWORD: root - POSTGRES_DB: fairhub_local + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + PGUSER: postgres ports: - 5432:5432 - volumes: - - ./postgres-data:/var/lib/postgresql/data + # restart: always + healthcheck: + test: pg_isready + interval: 10s + timeout: 5s + retries: 5 + # volumes: + # - ./postgres-data:/var/lib/postgresql/data + # - ./sql/init_timezones.sql:/docker-entrypoint-initdb.d/1-schema.sql cache: image: redis:7.2-alpine - restart: always + # restart: always environment: CACHE_DB: fairhub CACHE_HOST: localhost CACHE_PORT: 6379 CACHE_URL: redis://127.0.0.1:6379 - CACHE_KEY_PREFIX: fairhub-io# + CACHE_PREFIX: fairhub-io# CACHE_TIMEOUT: 86400 + CACHE_PASSWORD: development ports: - '6379:6379' command: redis-server --save 20 1 --loglevel warning volumes: - cache:/data -volumes: - cache: - driver: local + # volumes: + # cache: + # driver: local + diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py index 7f94b6d7..1ed408da 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap_project_api.py @@ -1,4 +1,5 @@ import uuid +from dataclasses import dataclass from datetime import datetime, timezone from model import Study @@ -6,24 +7,31 @@ from .db import db +@dataclass class StudyRedcapProjectApi(db.Model): # type: ignore - """A study is a collection of datasets and participants""" + """ + A REDCap Project API is associated a study + """ - def 
__init__(self, study): - self.study = study - self.id = str(uuid.uuid4()) - self.created_at = datetime.now(timezone.utc).timestamp() + study_id: str + project_id: int + project_title: str + project_api_url: str + project_api_key: str + project_api_active: bool + created_at: int + updated_on: int - __tablename__ = "study_redcap_project_api" - project_id = db.Column(db.CHAR(5), primary_key=True) - project_title = db.Column(db.String, nullable=False) - project_api_url = db.Column(db.String, nullable=False) - project_api_key = db.Column(db.String, nullable=False) - project_api_active = db.Column(db.Boolean, nullable=False) - created_at = db.Column(db.BigInteger, nullable=False) - updated_on = db.Column(db.BigInteger, nullable=False) + __tablename__: str = "study_redcap_project_api" + project_id: int = db.Column(db.BigInteger, primary_key=True) + project_title: str = db.Column(db.String, nullable=False) + project_api_url: str = db.Column(db.String, nullable=False) + project_api_key: str = db.Column(db.String, nullable=False) + project_api_active: bool = db.Column(db.Boolean, nullable=False) + created_at: int = db.Column(db.BigInteger, nullable=False) + updated_on: int = db.Column(db.BigInteger, nullable=False) - study_id = db.Column( + study_id: str = db.Column( db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False, @@ -35,12 +43,16 @@ def __init__(self, study): "StudyRedcapProjectDashboard", back_populates="study_redcap_project_api" ) + def __init__(self, study): + self.study = study + self.id = str(uuid.uuid4()) + self.created_at = datetime.now(timezone.utc).timestamp() + def to_dict(self): """Converts the study to a dictionary""" return { - "study_id": self.study.id, - "project_title": self.project_title, "project_id": self.project_id, + "project_title": self.project_title, "project_api_url": self.project_api_url, "project_api_key": self.project_api_key, "project_api_active": self.project_api_active, @@ -55,15 +67,12 @@ def from_data(study: 
Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - assignable = { - key - for key in self.to_dict().keys() # pylint: disable=consider-iterating-dictionary - if key.startswith("project") - } + assignable = {key for key in self.to_dict().keys() if key.startswith("project")} for key, val in data.items(): if key in assignable: setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() + return self def validate(self): """Validates the study""" diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index c3963bb1..14d849ef 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -1,7 +1,8 @@ import uuid +from dataclasses import dataclass from datetime import datetime, timezone -from sqlalchemy import String +from sqlalchemy import JSON from sqlalchemy.dialects.postgresql import ARRAY from model import Study @@ -9,31 +10,38 @@ from .db import db +@dataclass class StudyRedcapProjectDashboard(db.Model): # type: ignore - """A study is a collection of datasets and participants""" + """ + A Project Dashboard is associated with a + REDCap Project, which is part of a study + """ - def __init__(self, study): - self.study = study - self.id = str(uuid.uuid4()) - self.created_at = datetime.now(timezone.utc).timestamp() - - __tablename__ = "study_redcap_project_dashboard" - dashboard_id = db.Column(db.CHAR(36), primary_key=True) - dashboard_name = db.Column(db.String, nullable=False) - dashboard_modules = db.Column(ARRAY(String), nullable=False) - created_at = db.Column(db.BigInteger, nullable=False) - updated_on = db.Column(db.BigInteger, nullable=False) + project_id: int + dashboard_id: str + dashboard_name: str + dashboard_modules: list[dict[str, (str | bool | int)]] + created_at: int + updated_on: int - study_id = db.Column( - db.CHAR(36), - db.ForeignKey("study.id", ondelete="CASCADE"), - nullable=False, + __tablename__: str = 
"study_redcap_project_dashboard" + dashboard_id: str = db.Column(db.CHAR(36), primary_key=True) + dashboard_name: str = db.Column(db.String, nullable=False) + dashboard_modules: list[dict[str, (str | bool | int)]] = db.Column( + ARRAY(JSON), nullable=True ) - project_id = db.Column( - db.CHAR(5), + created_at: int = db.Column(db.BigInteger, nullable=False) + updated_on: int = db.Column(db.BigInteger, nullable=False) + project_id: int = db.Column( + db.BigInteger, db.ForeignKey("study_redcap_project_api.project_id", ondelete="CASCADE"), nullable=False, ) + study_id: str = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + nullable=False, + ) study = db.relationship( "Study", back_populates="study_redcap_project_dashboards", cascade="all, delete" ) @@ -43,13 +51,18 @@ def __init__(self, study): cascade="all, delete", ) + def __init__(self, study): + self.study = study + self.dashboard_id = str(uuid.uuid4()) + self.created_at = datetime.now(timezone.utc).timestamp() + def to_dict(self): """Converts the study to a dictionary""" return { "project_id": self.project_id, "dashboard_id": self.dashboard_id, "dashboard_name": self.dashboard_name, - "dashboard_endpoint": self.dashboard_endpoint, + "dashboard_modules": self.dashboard_modules, "created_at": self.created_at, "updated_on": self.updated_on, } @@ -65,13 +78,14 @@ def update(self, data: dict): """Updates the study from a dictionary""" assignable = { key - for key in self.__dict__.keys() # pylint: disable=consider-iterating-dictionary - if key.startswith("dashboard") + for key in self.to_dict().keys() + if key.startswith("project") or key.startswith("dashboard") } for key, val in data.items(): if key in assignable: setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() + return self def validate(self): """Validates the study""" diff --git a/tests/conftest.py b/tests/conftest.py index 37c2d811..32c2f8e7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -64,7 
+64,7 @@ def _create_user(_test_client): response = _test_client.post( "/auth/signup", json={ - "email_address": "ymir@fairhub.io", + "email_address": "sample@gmail.com", "password": "Testingyeshello11!", "code": "7654321", }, @@ -81,7 +81,7 @@ def _login_user(_test_client): response = _test_client.post( "/auth/login", json={ - "email_address": "ymir@fairhub.io", + "email_address": "sample@gmail.com", "password": "Testingyeshello11!", }, ) From eddbf544a3a3df270e851d4b4fe177c1da830484 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 6 Nov 2023 12:13:40 -0800 Subject: [PATCH 347/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20study=20?= =?UTF-8?q?metadata=20input=20schemas?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_available_ipd.py | 9 +++------ apis/study_metadata/study_contact.py | 1 + apis/study_metadata/study_intervention.py | 10 ++++------ apis/study_metadata/study_ipdsharing.py | 9 ++++----- apis/study_metadata/study_link.py | 9 ++++----- apis/study_metadata/study_location.py | 7 +++---- apis/study_metadata/study_overall_official.py | 7 +++---- apis/study_metadata/study_reference.py | 7 +++---- apis/study_metadata/study_status.py | 2 +- model/invited_study_contributor.py | 2 +- 10 files changed, 27 insertions(+), 36 deletions(-) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 3c57be7c..09413578 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -47,8 +47,6 @@ def get(self, study_id: int): ) @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(study_available) - # marshal with will need to be removed to have validation errors return @api.expect(study_available) def post(self, study_id: int): """Create study available metadata""" @@ -59,6 +57,7 @@ def post(self, study_id: int): "type": "object", "additionalProperties": 
False, "properties": { + "id": {"type": "string"}, "identifier": {"type": "string", "minLength": 1}, "type": { "type": "string", @@ -98,12 +97,10 @@ def post(self, study_id: int): if "id" in i and i["id"]: study_available_ipd_ = model.StudyAvailableIpd.query.get(i["id"]) study_available_ipd_.update(i) - list_of_elements.append(study_available_ipd_.to_dict()) - elif "id" not in i or not i["id"]: + else: study_available_ipd_ = model.StudyAvailableIpd.from_data(study_obj, i) model.db.session.add(study_available_ipd_) - list_of_elements.append(study_available_ipd_.to_dict()) - + list_of_elements.append(study_available_ipd_.to_dict()) model.db.session.commit() return list_of_elements diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 3c1a2466..129ac385 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -71,6 +71,7 @@ def validate_is_valid_email(instance): "email_address", ], "properties": { + "id": {"type": "string"}, "name": {"type": "string", "minLength": 1}, "affiliation": {"type": "string", "minLength": 1}, "role": {"type": "string", "minLength": 1}, diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 3bf88d99..61a5573a 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -53,6 +53,7 @@ def post(self, study_id: int): "type": "object", "additionalProperties": False, "properties": { + "id": {"type": "string"}, "type": { "type": "string", "enum": [ @@ -71,7 +72,7 @@ def post(self, study_id: int): ], }, "name": {"type": "string", "minLength": 1}, - "description": {"type": "string", "minLength": 1}, + "description": {"type": "string"}, "arm_group_label_list": { "type": "array", "items": {"type": "string", "minLength": 1}, @@ -81,7 +82,6 @@ def post(self, study_id: int): "other_name_list": { "type": "array", "items": {"type": "string", "minLength": 1}, - "minItems": 1, 
"uniqueItems": True, }, }, @@ -104,12 +104,10 @@ def post(self, study_id: int): if "id" in i and i["id"]: study_intervention_ = model.StudyIntervention.query.get(i["id"]) study_intervention_.update(i) - list_of_elements.append(study_intervention_.to_dict()) - elif "id" not in i or not i["id"]: + else: study_intervention_ = model.StudyIntervention.from_data(study_obj, i) model.db.session.add(study_intervention_) - list_of_elements.append(study_intervention_.to_dict()) - + list_of_elements.append(study_intervention_.to_dict()) model.db.session.commit() return list_of_elements diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index f82b6a75..c04d6fae 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -47,7 +47,7 @@ def put(self, study_id: int): "additionalProperties": False, "properties": { "ipd_sharing": {"type": "string", "enum": ["Yes", "No", "Undecided"]}, - "ipd_sharing_description": {"type": "string", "minLength": 1}, + "ipd_sharing_description": {"type": "string"}, "ipd_sharing_info_type_list": { "type": "array", "items": { @@ -60,12 +60,11 @@ def put(self, study_id: int): "Analytical Code", ], }, - "minItems": 1, "uniqueItems": True, }, - "ipd_sharing_time_frame": {"type": "string", "minLength": 1}, - "ipd_sharing_access_criteria": {"type": "string", "minLength": 1}, - "ipd_sharing_url": {"type": "string", "format": "uri", "minLength": 1}, + "ipd_sharing_time_frame": {"type": "string"}, + "ipd_sharing_access_criteria": {"type": "string"}, + "ipd_sharing_url": {"type": "string", "format": "uri"}, }, "required": [ "ipd_sharing", diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 76f5190f..6f97b112 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -45,7 +45,8 @@ def post(self, study_id: int): "type": "object", "additionalProperties": False, "properties": { - "url": {"type": "string", 
"format": "uri"}, + "id": {"type": "string"}, + "url": {"type": "string", "format": "uri", "minLength": 1}, "title": {"type": "string"}, }, "required": ["url", "title"], @@ -70,13 +71,11 @@ def post(self, study_id: int): return f"Study link {i['id']} Id is not found", 404 study_link_.update(i) - list_of_elements.append(study_link_.to_dict()) - elif "id" not in i or not i["id"]: + else: study_link_ = model.StudyLink.from_data(study_obj, i) model.db.session.add(study_link_) - list_of_elements.append(study_link_.to_dict()) - + list_of_elements.append(study_link_.to_dict()) model.db.session.commit() return list_of_elements diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index b495864f..09ae5cc1 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -52,6 +52,7 @@ def post(self, study_id: int): "items": { "type": "object", "properties": { + "id": {"type": "string"}, "facility": {"type": "string", "minLength": 1}, "status": { "type": "string", @@ -89,12 +90,10 @@ def post(self, study_id: int): if "id" in i and i["id"]: study_location_ = model.StudyLocation.query.get(i["id"]) study_location_.update(i) - list_of_elements.append(study_location_.to_dict()) - elif "id" not in i or not i["id"]: + else: study_location_ = model.StudyLocation.from_data(study_obj, i) model.db.session.add(study_location_) - list_of_elements.append(study_location_.to_dict()) - + list_of_elements.append(study_location_.to_dict()) model.db.session.commit() return list_of_elements diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 836145cb..def4a2da 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -55,6 +55,7 @@ def post(self, study_id: int): "type": "object", "additionalProperties": False, "properties": { + "id": {"type": "string"}, "name": {"type": "string", "minLength": 1}, "affiliation": 
{"type": "string", "minLength": 1}, "role": { @@ -85,14 +86,12 @@ def post(self, study_id: int): if "id" in i and i["id"]: study_overall_official_ = model.StudyOverallOfficial.query.get(i["id"]) study_overall_official_.update(i) - list_of_elements.append(study_overall_official_.to_dict()) - elif "id" not in i or not i["id"]: + else: study_overall_official_ = model.StudyOverallOfficial.from_data( study_obj, i ) model.db.session.add(study_overall_official_) - list_of_elements.append(study_overall_official_.to_dict()) - + list_of_elements.append(study_overall_official_.to_dict()) model.db.session.commit() return list_of_elements diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 871909d2..6e01ea41 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -50,6 +50,7 @@ def post(self, study_id: int): "type": "object", "additionalProperties": False, "properties": { + "id": {"type": "string"}, "identifier": {"type": "string"}, "type": {"type": ["string", "null"]}, "citation": {"type": "string", "minLength": 1}, @@ -73,12 +74,10 @@ def post(self, study_id: int): if "id" in i and i["id"]: study_reference_ = model.StudyReference.query.get(i["id"]) study_reference_.update(i) - list_of_elements.append(study_reference_.to_dict()) - elif "id" not in i or not i["id"]: + else: study_reference_ = model.StudyReference.from_data(study_obj, i) model.db.session.add(study_reference_) - list_of_elements.append(study_reference_.to_dict()) - + list_of_elements.append(study_reference_.to_dict()) model.db.session.commit() return list_of_elements diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 0840cf64..941220a2 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -70,7 +70,7 @@ def put(self, study_id: int): "Completed", ], }, - "why_stopped": {"type": "string", "minLength": 1}, + "why_stopped": {"type": "string"}, 
"start_date": {"type": "string", "minLength": 1}, "start_date_type": { "type": "string", diff --git a/model/invited_study_contributor.py b/model/invited_study_contributor.py index dd4279e9..99b8ef24 100644 --- a/model/invited_study_contributor.py +++ b/model/invited_study_contributor.py @@ -1,7 +1,7 @@ import datetime +import random import uuid -import random from .db import db from .study import Study From e074add4ee9948ce6a0c0fe621ce916793e37c16 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 7 Nov 2023 13:11:33 -0800 Subject: [PATCH 348/505] =?UTF-8?q?=E2=9C=A8=20feat:=20add=20support=20for?= =?UTF-8?q?=20jsonschema=20validation=20for=20dataset=20metadata=20(#20)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✨ feat: add support for jsonschema validation for dataset metadata * 🐛 fix: add better validation for related items --- apis/dataset_metadata/dataset_access.py | 30 +++ .../dataset_alternate_identifier.py | 59 +++++- apis/dataset_metadata/dataset_consent.py | 38 ++++ apis/dataset_metadata/dataset_contributor.py | 160 ++++++++++++++++ apis/dataset_metadata/dataset_date.py | 39 ++++ .../dataset_de_ident_level.py | 39 ++++ apis/dataset_metadata/dataset_description.py | 45 +++++ apis/dataset_metadata/dataset_funder.py | 44 +++++ apis/dataset_metadata/dataset_other.py | 64 +++++++ apis/dataset_metadata/dataset_record_keys.py | 28 +++ apis/dataset_metadata/dataset_related_item.py | 175 ++++++++++++++++++ apis/dataset_metadata/dataset_rights.py | 34 ++++ apis/dataset_metadata/dataset_subject.py | 41 ++++ apis/dataset_metadata/dataset_title.py | 44 +++++ 14 files changed, 839 insertions(+), 1 deletion(-) diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index 0a5f3046..d9e79d1e 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -1,5 +1,8 @@ +"""API for dataset access metadata""" + from flask import request from 
flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -19,11 +22,14 @@ @api.route("/study//dataset//metadata/access") class DatasetAccessResource(Resource): + """Dataset Access Resource""" + @api.doc("access") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_access) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset access""" dataset_ = model.Dataset.query.get(dataset_id) dataset_access_ = dataset_.dataset_access return dataset_access_.to_dict() @@ -32,10 +38,34 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Update dataset access""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "description": {"type": "string", "minLength": 1}, + "type": {"type": "string", "minLength": 1}, + "url": {"type": "string"}, + "url_last_checked": {"type": ["integer", "null"]}, + }, + "required": [ + "description", + "type", + "url", + "url_last_checked", + ], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_access.update(request.json) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 33ec00a2..f81144a2 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,7 +1,9 @@ +"""API endpoints for 
dataset alternate identifier""" from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -20,11 +22,14 @@ @api.route("/study//dataset//metadata/alternative-identifier") class DatasetAlternateIdentifierResource(Resource): + """Dataset Alternate Identifier Resource""" + @api.doc("identifier") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_identifier) def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argument + """Get dataset alternate identifier""" dataset_ = model.Dataset.query.get(dataset_id) dataset_identifier_ = dataset_.dataset_alternate_identifier return [d.to_dict() for d in dataset_identifier_] @@ -33,10 +38,59 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argum @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset alternate identifier""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 - # pylint: disable= unused-argument + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": { + "type": "string", + "minLength": 1, + }, + "type": { + "type": "string", + "enum": [ + "ark", + "arxiv", + "bibcode", + "doi", + "ean13", + "eissn", + "handle", + "igsn", + "isbn", + "issn", + "istc", + "lissn", + "lsid", + "pmid", + "purl", + "upc", + "url", + "urn", + "w3id", + "other", + ], + }, + }, + "required": ["identifier", "type"], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: Union[Any, dict] = request.json data_obj = 
model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -63,12 +117,15 @@ def post(self, study_id: int, dataset_id: int): "metadata/alternative-identifier/" ) class DatasetAlternateIdentifierUpdate(Resource): + """Dataset Alternate Identifier Update Resource""" + @api.doc("delete identifier") @api.response(200, "Success") @api.response(400, "Validation Error") def delete( self, study_id: int, dataset_id: int, identifier_id: int ): # pylint: disable= unused-argument + """Delete dataset alternate identifier""" dataset_identifier_ = model.DatasetAlternateIdentifier.query.get( identifier_id ) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 7eded36e..3487d772 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,5 +1,7 @@ +"""API for dataset consent metadata""" from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -22,11 +24,14 @@ @api.route("/study//dataset//metadata/consent") class DatasetConsentResource(Resource): + """Dataset Consent Resource""" + @api.doc("consent") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_consent) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset consent""" dataset_ = model.Dataset.query.get(dataset_id) dataset_consent_ = dataset_.dataset_consent return dataset_consent_.to_dict() @@ -35,9 +40,42 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): + """Update dataset consent""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + 
"type": "object", + "additionalProperties": False, + "properties": { + "type": {"type": "string", "minLength": 1}, + "details": { + "type": "string", + }, + "genetic_only": {"type": "boolean"}, + "geog_restrict": {"type": "boolean"}, + "no_methods": {"type": "boolean"}, + "noncommercial": {"type": "boolean"}, + "research_type": {"type": "boolean"}, + }, + "required": [ + "type", + "details", + "genetic_only", + "geog_restrict", + "no_methods", + "noncommercial", + "research_type", + ], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_consent.update(data) diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py index dd514624..64394d6e 100644 --- a/apis/dataset_metadata/dataset_contributor.py +++ b/apis/dataset_metadata/dataset_contributor.py @@ -1,7 +1,9 @@ +"""API for dataset contributor metadata""" from typing import Any, Union from flask import request from flask_restx import Resource +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -15,22 +17,102 @@ @api.route("/study//dataset//metadata/contributor") class DatasetContributorResource(Resource): + """Dataset Contributor Resource""" + @api.doc("contributor") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_contributor) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset contributor""" dataset_ = model.Dataset.query.get(dataset_id) dataset_contributor_ = dataset_.dataset_contributors + return [d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"]] @api.doc("update contributor") @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset contributor""" study_obj = 
model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, can't modify dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "contributor_type": { + "type": "string", + "minLength": 1, + }, + "name": { + "type": "string", + "minLength": 1, + }, + "name_identifier": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme_uri": { + "type": "string", + }, + "name_type": { + "type": "string", + "enum": [ + "Personal", + "Organizational", + ], + "minLength": 1, + }, + "affiliations": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": { + "type": "string", + }, + "identifier": { + "type": "string", + }, + "scheme": { + "type": "string", + }, + "scheme_uri": { + "type": "string", + }, + }, + }, + "uniqueItems": True, + }, + }, + "required": [ + "contributor_type", + "name_type", + "name", + "affiliations", + "name_identifier", + "name_identifier_scheme", + ], + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -54,6 +136,8 @@ def post(self, study_id: int, dataset_id: int): "/study//dataset//metadata/contributor/" ) class DatasetContributorDelete(Resource): + """Dataset Contributor Delete Resource""" + @api.doc("delete contributor") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -63,6 +147,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument contributor_id: int, ): + """Delete dataset contributor""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset 
metadata", 403 @@ -76,11 +161,14 @@ def delete( @api.route("/study//dataset//metadata/creator") class DatasetCreatorResource(Resource): + """Dataset Creator Resource""" + @api.doc("creator") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_contributor) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset creator""" dataset_ = model.Dataset.query.get(dataset_id) dataset_creator_ = dataset_.dataset_contributors return [d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"]] @@ -89,9 +177,80 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset creator""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "name": { + "type": "string", + "minLength": 1, + }, + "name_identifier": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme_uri": { + "type": "string", + }, + "name_type": { + "type": "string", + "enum": [ + "Personal", + "Organizational", + ], + "minLength": 1, + }, + "affiliations": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": { + "type": "string", + }, + "identifier": { + "type": "string", + }, + "scheme": { + "type": "string", + }, + "scheme_uri": { + "type": "string", + }, + }, + }, + "uniqueItems": True, + }, + }, + "required": [ + "name_type", + "name", + "affiliations", + "name_identifier", + "name_identifier_scheme", + ], + }, + } + + try: + validate(request.json, 
schema) + except ValidationError as e: + return e.message, 400 + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -124,6 +283,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument creator_id: int, ): + """Delete dataset creator""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index da2f9f3a..d687f96c 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,7 +1,9 @@ +"""APIs for dataset date metadata""" from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -20,11 +22,14 @@ @api.route("/study//dataset//metadata/date") class DatasetDateResource(Resource): + """Dataset Date Resource""" + @api.doc("date") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_date) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset date""" dataset_ = model.Dataset.query.get(dataset_id) dataset_date_ = dataset_.dataset_date return [d.to_dict() for d in dataset_date_] @@ -33,9 +38,40 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset date""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, 
+ "date": { + "type": "integer", + }, + "type": { + "type": "string", + "minLength": 1, + }, + "information": { + "type": "string", + }, + }, + "required": ["date", "type", "information"], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -56,12 +92,15 @@ def post(self, study_id: int, dataset_id: int): @api.route("/study//dataset//metadata/date/") class DatasetDateDeleteResource(Resource): + """Dataset Date Delete Resource""" + @api.doc("delete date") @api.response(200, "Success") @api.response(400, "Validation Error") def delete( self, study_id: int, dataset_id: int, date_id: int ): # pylint: disable= unused-argument + """Delete dataset date""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index e1974e7f..bb58d8f1 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -1,5 +1,8 @@ +"""APIs for dataset de-identification level""" + from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -22,11 +25,14 @@ @api.route("/study//dataset//metadata/de-identification-level") class DatasetDeIdentLevelResource(Resource): + """Dataset De-Identification Level Resource""" + @api.doc("de_ident_level") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(de_ident_level) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset de-identification level""" dataset_ = 
model.Dataset.query.get(dataset_id) de_ident_level_ = dataset_.dataset_de_ident_level return de_ident_level_.to_dict() @@ -35,9 +41,42 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): + """Update dataset de-identification level""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "type": {"type": "string", "minLength": 1}, + "details": { + "type": "string", + }, + "direct": {"type": "boolean"}, + "hipaa": {"type": "boolean"}, + "dates": {"type": "boolean"}, + "k_anon": {"type": "boolean"}, + "nonarr": {"type": "boolean"}, + }, + "required": [ + "type", + "details", + "direct", + "hipaa", + "dates", + "k_anon", + "nonarr", + ], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_de_ident_level.update(data) diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index fa2f3df8..3f86f022 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -1,7 +1,10 @@ +"""API endpoints for dataset description""" + from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -19,11 +22,14 @@ @api.route("/study//dataset//metadata/description") class DatasetDescriptionResource(Resource): + """Dataset Description Resource""" + @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") # 
@api.marshal_with(dataset_description) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset description""" dataset_ = model.Dataset.query.get(dataset_id) dataset_description_ = dataset_.dataset_description return [d.to_dict() for d in dataset_description_] @@ -32,9 +38,45 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset description""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "description": { + "type": "string", + "minLength": 1, + }, + "type": { + "type": "string", + "enum": [ + "Abstract", + "Methods", + "SeriesInformation", + "TableOfContents", + "TechnicalInfo", + "Other", + ], + }, + }, + "required": ["description", "type"], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -65,6 +107,8 @@ def post(self, study_id: int, dataset_id: int): "metadata/description/" ) class DatasetDescriptionUpdate(Resource): + """Dataset Description Update Resource""" + @api.doc("delete description") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -74,6 +118,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument description_id: int, ): + """Delete dataset description""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return ( diff --git a/apis/dataset_metadata/dataset_funder.py 
b/apis/dataset_metadata/dataset_funder.py index 8b610a9e..0bdb5617 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,7 +1,9 @@ +"""API endpoints for dataset funder""" from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -24,11 +26,14 @@ @api.route("/study//dataset//metadata/funder") class DatasetFunderResource(Resource): + """Dataset Funder Resource""" + @api.doc("funder") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset_funder) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset funder""" dataset_ = model.Dataset.query.get(dataset_id) dataset_funder_ = dataset_.dataset_funder return [d.to_dict() for d in dataset_funder_] @@ -37,10 +42,46 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Update dataset funder""" data: Union[Any, dict] = request.json study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "award_number": {"type": "string", "minLength": 1}, + "award_title": {"type": "string"}, + "award_uri": {"type": "string"}, + "identifier": {"type": "string", "minLength": 1}, + "identifier_scheme_uri": {"type": "string"}, + "identifier_type": {"type": ["string", "null"]}, + }, + "required": [ + "name", + "award_number", + "award_title", + "award_uri", + "identifier", + 
"identifier_scheme_uri", + "identifier_type", + ], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] for i in data: @@ -60,6 +101,8 @@ def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argum @api.route("/study//dataset//metadata/funder/") class DatasetFunderUpdate(Resource): + """Dataset Funder Update Resource""" + @api.doc("delete funder") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -69,6 +112,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument funder_id: int, ): + """Delete dataset funder""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index e78e259f..25417dc0 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,5 +1,7 @@ +"""API endpoints for other dataset metadata""" from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -21,11 +23,14 @@ @api.route("/study//dataset//metadata/other") class DatasetOtherResource(Resource): + """Dataset Other Resource""" + @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset_other) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset other metadata""" dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other return dataset_other_.to_dict() @@ -35,9 +40,40 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(400, "Validation Error") 
@api.marshal_with(dataset_other) def put(self, study_id: int, dataset_id: int): + """Update dataset other metadata""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "acknowledgement": {"type": "string"}, + "language": {"type": "string"}, + "resource_type": {"type": "string"}, + "size": { + "type": "array", + "items": {"type": "string"}, + "uniqueItems": True, + }, + "standards_followed": {"type": "string"}, + }, + "required": [ + "acknowledgement", + "language", + "resource_type", + "size", + "standards_followed", + ], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_other.update(data) @@ -47,11 +83,14 @@ def put(self, study_id: int, dataset_id: int): @api.route("/study//dataset//metadata/publisher") class DatasetPublisherResource(Resource): + """Dataset Publisher Resource""" + @api.doc("publisher") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_publisher) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset publisher metadata""" dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other return dataset_other_.to_dict() @@ -60,9 +99,34 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): + """Update dataset publisher metadata""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + 
"additionalProperties": False, + "properties": { + "publisher": {"type": "string", "minLength": 1}, + "managing_organization_name": {"type": "string", "minLength": 1}, + "managing_organization_ror_id": { + "type": "string", + }, + }, + "required": [ + "publisher", + "managing_organization_name", + "managing_organization_ror_id", + ], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_other.update(data) diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index fe0613dc..d738082e 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -1,5 +1,7 @@ +"""API endpoints for dataset record keys""" from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -17,11 +19,14 @@ @api.route("/study//dataset//metadata/record-keys") class DatasetRecordKeysResource(Resource): + """Dataset Record Keys Resource""" + @api.doc("record keys") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_record_keys) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset record keys""" dataset_ = model.Dataset.query.get(dataset_id) dataset_record_keys_ = dataset_.dataset_record_keys @@ -31,9 +36,32 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): + """Update dataset record keys""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + 
"additionalProperties": False, + "properties": { + "type": {"type": "string", "minLength": 1}, + "details": { + "type": "string", + }, + }, + "required": [ + "type", + "details", + ], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data = request.json dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_record_keys.update(data) diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 35d938b8..1e3581c7 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,7 +1,9 @@ +"""API for dataset related item""" from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -19,11 +21,14 @@ @api.route("/study//dataset//metadata/related-item") class DatasetRelatedItemResource(Resource): + """Dataset Related Item Resource""" + @api.doc("related item") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(dataset_related_item) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset related item""" dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_ = dataset_.dataset_related_item return [d.to_dict() for d in dataset_related_item_] @@ -32,12 +37,167 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset related item""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return ( "Access denied, you can not" " make any change in dataset metadata" # noqa: E402 ), 403 + + schema = { + "type": "array", + "items": { + "type": "object", + 
"additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "contributors": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "contributor_type": {"type": "string", "minLength": 1}, + "name_type": { + "type": "string", + "enum": ["Personal", "Organizational"], + }, + }, + "required": ["contributor_type", "name_type", "name"], + }, + "uniqueItems": True, + }, + "creators": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "name_type": { + "type": "string", + "enum": ["Personal", "Organizational"], + }, + }, + "required": ["name", "name_type"], + }, + "uniqueItems": True, + }, + "edition": {"type": "string"}, + "first_page": {"type": "string"}, + "identifiers": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": {"type": "string", "minLength": 1}, + "metadata_scheme": {"type": "string"}, + "scheme_type": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "type": { + "type": "string", + "enum": [ + "ark", + "arxiv", + "bibcode", + "doi", + "ean13", + "eissn", + "handle", + "igsn", + "isbn", + "issn", + "istc", + "lissn", + "lsid", + "pmid", + "purl", + "upc", + "url", + "urn", + "w3id", + "other", + ], + }, + }, + "required": [ + "identifier", + "metadata_scheme", + "scheme_type", + "scheme_uri", + "type", + ], + }, + "uniqueItems": True, + }, + "issue": {"type": "string"}, + "last_page": {"type": "string"}, + "number_type": {"type": "string"}, + "number_value": {"type": "string"}, + "publication_year": {"type": ["string", "null"]}, + "publisher": {"type": "string"}, + "relation_type": {"type": "string", "minLength": 1}, + "titles": { + "type": "array", + "items": { + "type": "object", + 
"additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "title": {"type": "string", "minLength": 1}, + "type": { + "type": "string", + "enum": [ + "MainTitle", + "AlternativeTitle", + "Subtitle", + "TranslatedTitle", + "OtherTitle", + "MainTitle", + ], + }, + }, + "required": ["title", "type"], + }, + "minItems": 1, + "uniqueItems": True, + }, + "type": {"type": "string", "minLength": 1}, + "volume": {"type": "string"}, + }, + "required": [ + "contributors", + "creators", + "edition", + "first_page", + "identifiers", + "issue", + "last_page", + "number_type", + "number_value", + "publication_year", + "publisher", + "relation_type", + "titles", + "type", + "volume", + ], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) for i in data: @@ -148,6 +308,8 @@ def post(self, study_id: int, dataset_id: int): "/study//dataset//metadata/related-item/" ) class DatasetRelatedItemUpdate(Resource): + """Dataset Related Item Update Resource""" + @api.doc("delete related item") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -157,6 +319,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument related_item_id: int, ): + """Delete dataset related item""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 @@ -173,6 +336,8 @@ def delete( "/contributor/" ) class RelatedItemContributorsDelete(Resource): + """Dataset Related Item Contributors Delete Resource""" + @api.doc("delete related item contributors") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -183,6 +348,7 @@ def delete( related_item_id: int, # pylint: disable= unused-argument contributor_id: int, ): + """Delete dataset related item 
contributors""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 @@ -200,6 +366,8 @@ def delete( "related-item//title/" ) class RelatedItemTitlesDelete(Resource): + """Dataset Related Item Titles Delete Resource""" + @api.doc("delete related item title") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -210,6 +378,7 @@ def delete( related_item_id: int, # pylint: disable= unused-argument title_id: int, ): + """Delete dataset related item title""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 @@ -224,6 +393,8 @@ def delete( "related-item//identifier/" ) class RelatedItemIdentifiersDelete(Resource): + """Dataset Related Item Identifiers Delete Resource""" + @api.doc("delete related item identifier") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -234,6 +405,7 @@ def delete( related_item_id: int, # pylint: disable= unused-argument identifier_id: int, ): + """Delete dataset related item identifier""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 @@ -250,6 +422,8 @@ def delete( "/creator/" # pylint: disable = line-too-long ) class RelatedItemCreatorDelete(Resource): + """Dataset Related Item Creator Delete Resource""" + @api.doc("delete related item creator") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -260,6 +434,7 @@ def delete( related_item_id: int, # pylint: disable= unused-argument creator_id: int, ): + """Delete dataset related item creator""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 diff --git 
a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 350589be..e0b28fa2 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -1,7 +1,10 @@ +"""API endpoints for dataset rights""" + from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -21,12 +24,15 @@ @api.route("/study//dataset//metadata/rights") class DatasetRightsResource(Resource): + """Dataset Rights Resource""" + @api.doc("rights") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_rights) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset rights""" dataset_ = model.Dataset.query.get(dataset_id) dataset_rights_ = dataset_.dataset_rights return [d.to_dict() for d in dataset_rights_] @@ -35,9 +41,34 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset rights""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "rights": {"type": "string", "minLength": 1}, + "uri": {"type": "string"}, + }, + "required": ["identifier", "identifier_scheme", "rights", "uri"], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data: Union[Any, dict] = 
request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -58,6 +89,8 @@ def post(self, study_id: int, dataset_id: int): @api.route("/study//dataset//metadata/rights/") class DatasetRightsUpdate(Resource): + """Dataset Rights Update Resource""" + @api.doc("delete rights") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -67,6 +100,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument rights_id: int, ): + """Delete dataset rights""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 256aff56..1a8f1740 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -1,7 +1,10 @@ +"""API endpoints for dataset subject""" + from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -22,12 +25,15 @@ @api.route("/study//dataset//metadata/subject") class DatasetSubjectResource(Resource): + """Dataset Subject Resource""" + @api.doc("subject") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_subject) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset subject""" dataset_ = model.Dataset.query.get(dataset_id) dataset_subject_ = dataset_.dataset_subject return [d.to_dict() for d in dataset_subject_] @@ -36,9 +42,41 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset subject""" study_obj = 
model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can't modify dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "classification_code": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "subject": {"type": "string", "minLength": 1}, + "value_uri": {"type": "string"}, + }, + "required": [ + "subject", + "scheme", + "scheme_uri", + "value_uri", + "classification_code", + ], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -59,6 +97,8 @@ def post(self, study_id: int, dataset_id: int): @api.route("/study//dataset//metadata/subject/") class DatasetSubjectUpdate(Resource): + """Dataset Subject Update Resource""" + @api.doc("delete subject") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -68,6 +108,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument subject_id: int, ): + """Delete dataset subject""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can't make change in dataset metadata", 403 diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index f452506c..b3d245c8 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,7 +1,9 @@ +"""API for dataset title metadata""" from typing import Any, Union from flask import request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.authentication import is_granted @@ -19,12 +21,15 @@ @api.route("/study//dataset//metadata/title") class 
DatasetTitleResource(Resource): + """Dataset Title Resource""" + @api.doc("title") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The dataset identifier") @api.marshal_with(dataset_title) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset title""" dataset_ = model.Dataset.query.get(dataset_id) dataset_title_ = dataset_.dataset_title return [d.to_dict() for d in dataset_title_] @@ -33,9 +38,45 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(200, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): + """Update dataset title""" study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "title": { + "type": "string", + "minLength": 1, + }, + "type": { + "type": "string", + "enum": [ + "MainTitle", + "AlternativeTitle", + "Subtitle", + "TranslatedTitle", + "OtherTitle", + "MainTitle", + ], + }, + }, + "required": ["title", "type"], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + data: Union[Any, dict] = request.json data_obj = model.Dataset.query.get(dataset_id) list_of_elements = [] @@ -63,6 +104,8 @@ def post(self, study_id: int, dataset_id: int): @api.route("/study//dataset//metadata/title/") class DatasetDescriptionUpdate(Resource): + """Dataset Title Update Resource""" + @api.doc("delete title") @api.response(200, "Success") @api.response(400, "Validation Error") @@ -72,6 +115,7 @@ def delete( dataset_id: int, # pylint: disable= unused-argument title_id: int, ): + """Delete dataset title""" study_obj = 
model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return ( From f57c56be84bb271b41b765e6f1aad884f49d249f Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 7 Nov 2023 14:14:36 -0800 Subject: [PATCH 349/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20study=20design=20?= =?UTF-8?q?json=20schema?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/study_metadata/study_design.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index d4499d86..0b8fd32c 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -71,7 +71,7 @@ def put(self, study_id: int): "design_primary_purpose": {"type": ["string", "null"]}, "design_masking": {"type": ["string", "null"]}, "design_masking_description": { - "type": "string", + "type": ["string", "null"], }, "design_who_masked_list": { "type": "array", @@ -118,7 +118,7 @@ def put(self, study_id: int): }, "number_arms": {"type": ["integer", "null"]}, "design_observational_model_list": { - "type": "array", + "type": ["array", "null"], "items": { "type": "string", "oneOf": [ @@ -138,7 +138,7 @@ def put(self, study_id: int): "uniqueItems": True, }, "design_time_perspective_list": { - "type": "array", + "type": ["array", "null"], "items": { "type": "string", "oneOf": [ @@ -156,10 +156,10 @@ def put(self, study_id: int): }, "bio_spec_retention": {"type": ["string", "null"]}, "bio_spec_description": { - "type": "string", + "type": ["string", "null"], }, "target_duration": { - "type": "string", + "type": ["string", "null"], }, "number_groups_cohorts": {"type": ["integer", "null"]}, }, From a30d3cd156fced140c46115fa99c3560726798f0 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 7 Nov 2023 14:37:59 -0800 Subject: [PATCH 350/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20valid=20?= =?UTF-8?q?identifier=20type=20values?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../dataset_alternate_identifier.py | 36 +++++++++---------- apis/dataset_metadata/dataset_related_item.py | 36 +++++++++---------- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index f81144a2..745be6c9 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -58,26 +58,26 @@ def post(self, study_id: int, dataset_id: int): "type": { "type": "string", "enum": [ - "ark", - "arxiv", + "ARK", + "arXiv", "bibcode", - "doi", - "ean13", - "eissn", - "handle", - "igsn", - "isbn", - "issn", - "istc", - "lissn", - "lsid", - "pmid", - "purl", - "upc", - "url", - "urn", + "DOI", + "EAN13", + "EISSN", + "Handle", + "IGSN", + "ISBN", + "ISSN", + "ISTC", + "LISSN", + "LSID", + "PMID", + "PURL", + "UPC", + "URL", + "URN", "w3id", - "other", + "Other", ], }, }, diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 1e3581c7..820163ba 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -104,26 +104,26 @@ def post(self, study_id: int, dataset_id: int): "type": { "type": "string", "enum": [ - "ark", - "arxiv", + "ARK", + "arXiv", "bibcode", - "doi", - "ean13", - "eissn", - "handle", - "igsn", - "isbn", - "issn", - "istc", - "lissn", - "lsid", - "pmid", - "purl", - "upc", - "url", - "urn", + "DOI", + "EAN13", + "EISSN", + "Handle", + "IGSN", + "ISBN", + "ISSN", + "ISTC", + "LISSN", + "LSID", + "PMID", + "PURL", + "UPC", + "URL", + "URN", "w3id", - "other", + "Other", ], }, }, From 09c836eaf358cf5f028414e22e004c825291d8b8 Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Thu, 9 Nov 2023 17:47:54 -0800 Subject: [PATCH 351/505] test: 
:white_check_mark: add dataset and dataset metadata tests (#22) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * wip: pytest for dataset/dataset metadata api * style: 🎨 fix code style issues with Black * feat: POST test for dataset contributor metadata * feat: GET and DELETE test for dataset contributor metadata * feat: dataset creator metadata test for GET, POST, DELETE * style: 🎨 fix code style issues with Black * feat: GET and POST test for dataset dates metadata * feat: DELETE test for dataset dates metadata * feat: de-identification level tests for GET and PUT * style: 🎨 fix code style issues with Black * feat: GET dataset description test created * style: 🎨 fix code style issues with Black * feat: dataset description metadata tests for GET, POST, DELETE * GET test for dataset funder metadata * feat: POST test for dataset funder metadata * feat: delete dataset funder metadata test created * feat: GET and PUT tests created for other dataset metadata * GET dataset publisher metadata test created * feat: PUT dataset publisher test created * feat: GET and PUT tests created for record keys metadata * wip: fixes for tests and refactoring fixture to sign in only once * style: 🎨 fix code style issues with Black * feat: GET test for dataset related items created * fix: jsonschema type for publication year in related_items endpoint * feat: POST test for dataset related_items endpoint * feat: DELETE test created for dataset related item * feat: DELETE test created for related item contributor metadata * feat: DELETE test created for related item creator metadata * feat: DELETE test created for related item creator metadata * feat: DELETE test created for related item title metadata * feat: GET test for dataset rights metadata * POST test created for dataset rights metadata * DELETE test created for dataset rights metadata * GET test created for dataset subjects metadata * feat: POST test created for dataset subjects metadata * 
feat: DELETE test created for dataset subjects metadata * feat: GET test created for dataset title metadata * feat: DELETE test created for dataset title metadata * feat: POST test created for dataset title metadata * feat: created global variables needed for testing * style: 🎨 fix code style issues with Black * feat: pytest will log user in once per test session * fix: GET and PUT readme endpoint patched * feat: DELETE and PUT tests created for study api * refactor: variable created for temporary study id * refactor: update fixtures that are used for test (logged in user persists) * fix: return study id in PUT study description endpoint * fix: study other metadata returns study id in GET and PUT request * refactor: cleaned tests and updated based off endpoint updates * refactor: DELETE dataset endpoint returns 204 upon success * feat: DELETE test created for dataset endpoint * feat: added more global variables for testing * feat: POST test created for creating a dataset version * feat: GET test created for getting all dataset versions * feat: GET test created for getting one dataset version * wip: PUT test skeleton created for updating dataset version * refactor: updated fixtures for study metadata * refactor: line too long fixed * refactor: fixtures updated for dataset metadata to persistent logged in sessions * fix: return number for status and message for body in study identification DELETE request * fix: dataset other metadata returns correct column for standards_followed * refactor: normalized layout throughout testing files * fix: create secondary study identifiers to then delete in next test * feat: sign in user before study tests begins * chore: remove pylint warning * chore: remove line too long warnings * style: 🎨 fix code style issues with Black * chore: remove uncommented code that is necessary * 🚨 chore: fix linter issues --------- Co-authored-by: Lint Action Co-authored-by: Sanjay Soundarajan --- .pylint.ini | 1 + apis/dataset.py | 9 +- 
apis/dataset_metadata/dataset_readme.py | 6 +- apis/dataset_metadata/dataset_related_item.py | 2 +- apis/study_metadata/study_contact.py | 3 +- apis/study_metadata/study_identification.py | 2 +- apis/user.py | 3 +- model/dataset_metadata/dataset_other.py | 2 +- model/dataset_metadata/dataset_readme.py | 1 + model/study_metadata/study_description.py | 1 + model/study_metadata/study_other.py | 1 + tests/conftest.py | 36 +- tests/functional/test_server_launch.py | 5 + tests/functional/test_study_api.py | 86 +- tests/functional/test_study_dataset_api.py | 159 ++- .../test_study_dataset_metadata_api.py | 1183 +++++++++++++++++ tests/functional/test_study_metadata_api.py | 334 +++-- 17 files changed, 1638 insertions(+), 196 deletions(-) diff --git a/.pylint.ini b/.pylint.ini index 965d3ef4..892581db 100644 --- a/.pylint.ini +++ b/.pylint.ini @@ -59,6 +59,7 @@ disable= too-few-public-methods, too-many-locals, too-many-arguments, + line-too-long, unnecessary-pass, broad-except, duplicate-code, diff --git a/apis/dataset.py b/apis/dataset.py index adc97c14..dc609a75 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -88,15 +88,19 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(400, "Validation Error") def put(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) + if not is_granted("update_dataset", study): return "Access denied, you can not modify", 403 + data: typing.Union[dict, typing.Any] = request.json data_obj = model.Dataset.query.get(dataset_id) + data_obj.update(data) model.db.session.commit() + return data_obj.to_dict() - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) @@ -107,8 +111,7 @@ def delete(self, study_id: int, dataset_id: int): model.db.session.delete(version) model.db.session.delete(data_obj) model.db.session.commit() - dataset_ = 
study.dataset - return [d.to_dict() for d in dataset_], 201 + return 204 # def delete(self, study_id: int, dataset_id: int, version_id: int): # data_obj = Dataset.query.get(dataset_id) diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py index ec8101f6..edd1e0ad 100644 --- a/apis/dataset_metadata/dataset_readme.py +++ b/apis/dataset_metadata/dataset_readme.py @@ -20,7 +20,7 @@ class DatasetReadmeResource(Resource): def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument dataset_ = model.Dataset.query.get(dataset_id) dataset_readme_ = dataset_.dataset_readme - return [d.to_dict() for d in dataset_readme_] + return dataset_readme_.to_dict() def put(self, study_id: int, dataset_id: int): study_obj = model.Study.query.get(study_id) @@ -28,6 +28,6 @@ def put(self, study_id: int, dataset_id: int): return "Access denied, you can not make any change in dataset metadata", 403 data = request.json dataset_ = model.Dataset.query.get(dataset_id) - dataset_readme_ = dataset_.dataset_readme.update(data) + dataset_.dataset_readme.update(data) model.db.session.commit() - return dataset_readme_.to_dict() + return dataset_.dataset_readme.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 820163ba..df5bca5b 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -141,7 +141,7 @@ def post(self, study_id: int, dataset_id: int): "last_page": {"type": "string"}, "number_type": {"type": "string"}, "number_value": {"type": "string"}, - "publication_year": {"type": ["string", "null"]}, + "publication_year": {"type": ["integer", "null"]}, "publisher": {"type": "string"}, "relation_type": {"type": "string", "minLength": 1}, "titles": { diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 129ac385..ef4def18 100644 --- a/apis/study_metadata/study_contact.py 
+++ b/apis/study_metadata/study_contact.py @@ -48,9 +48,8 @@ def post(self, study_id: int): """Create study contact metadata""" def validate_is_valid_email(instance): - print("within is_valid_email") email_address = instance - print(email_address) + try: validate_email(email_address) return True diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index b137b708..85236bc5 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -126,7 +126,7 @@ def delete(self, study_id: int, identification_id: int): identification_id ) if not study_identification_.secondary: - return 400, "primary identifier can not be deleted" + return "primary identifier can not be deleted", 400 model.db.session.delete(study_identification_) model.db.session.commit() diff --git a/apis/user.py b/apis/user.py index dd0c1401..f4458cbf 100644 --- a/apis/user.py +++ b/apis/user.py @@ -49,9 +49,8 @@ def put(self): """Updates user details""" def validate_is_valid_email(instance): - print("within is_valid_email") email_address = instance - print(email_address) + try: validate_email(email_address) return True diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 00694625..5a96918f 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -37,7 +37,7 @@ def to_dict(self): "language": self.language, "managing_organization_name": self.managing_organization_name, "managing_organization_ror_id": self.managing_organization_ror_id, - "standards_followed": self.managing_organization_ror_id, + "standards_followed": self.standards_followed, "acknowledgement": self.acknowledgement, "size": self.size, "publisher": self.publisher, diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py index b443c95b..7fa75b3a 100644 --- a/model/dataset_metadata/dataset_readme.py +++ 
b/model/dataset_metadata/dataset_readme.py @@ -16,6 +16,7 @@ def __init__(self, dataset): def to_dict(self): return { + "id": self.dataset_id, "content": self.content, } diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index c13d8edf..be3289da 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -27,6 +27,7 @@ def __init__(self, study: Study): def to_dict(self): """Converts the study to a dictionary""" return { + "id": self.study_id, "brief_summary": self.brief_summary, "detailed_description": self.detailed_description, } diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index d23acac7..f66758e9 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -34,6 +34,7 @@ def __init__(self, study): def to_dict(self): """Converts the study to a dictionary""" return { + "id": self.study_id, "oversight_has_dmc": self.oversight_has_dmc, "conditions": self.conditions, "keywords": self.keywords, diff --git a/tests/conftest.py b/tests/conftest.py index 32c2f8e7..ceb29982 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,8 +16,8 @@ os.environ["FLASK_ENV"] = "testing" # Set global variable for study ID +# Study variables use for testing pytest.global_study_id = {} -pytest.global_dataset_id = "" pytest.global_version_id = "" pytest.global_arm_id = "" pytest.global_available_ipd_id = "" @@ -29,16 +29,35 @@ pytest.global_overall_official_id = "" pytest.global_reference_id = "" +# Dataset variables use for testing +pytest.global_dataset_id = "" +pytest.global_dataset_version_id = "" +pytest.global_alternative_identifier_id = "" +pytest.global_dataset_contributor_id = "" +pytest.global_dataset_creator_id = "" +pytest.global_dataset_date_id = "" +pytest.global_dataset_description_id = "" +pytest.global_dataset_funder_id = "" +pytest.global_dataset_related_item_id = "" 
+pytest.global_dataset_related_item_contributor_id = "" +pytest.global_dataset_related_item_creator_id = "" +pytest.global_dataset_related_item_identifier_id = "" +pytest.global_dataset_related_item_title_id = "" +pytest.global_dataset_rights_id = "" +pytest.global_dataset_subject_id = "" +pytest.global_dataset_title_id = "" + # Create the flask app for testing -@pytest.fixture() +@pytest.fixture(scope="session") def flask_app(): """An application for the tests.""" yield create_app(config_module="pytest_config") # Create a test client for the app -@pytest.fixture() +# pylint: disable=redefined-outer-name +@pytest.fixture(scope="session") def _test_client(flask_app): """A test client for the app.""" with flask_app.test_client() as _test_client: @@ -46,6 +65,7 @@ def _test_client(flask_app): # Empty local database for testing +# pylint: disable=redefined-outer-name @pytest.fixture() def _empty_db(flask_app): """Empty the local database.""" @@ -64,7 +84,7 @@ def _create_user(_test_client): response = _test_client.post( "/auth/signup", json={ - "email_address": "sample@gmail.com", + "email_address": "test@fairhub.io", "password": "Testingyeshello11!", "code": "7654321", }, @@ -74,16 +94,18 @@ def _create_user(_test_client): # Fixture to sign in the user for module testing -@pytest.fixture() -def _login_user(_test_client): +@pytest.fixture(scope="session") +def _logged_in_client(_test_client): """Sign in the user for testing.""" with unittest.mock.patch("pytest_config.TestConfig", TestConfig): response = _test_client.post( "/auth/login", json={ - "email_address": "sample@gmail.com", + "email_address": "test@fairhub.io", "password": "Testingyeshello11!", }, ) assert response.status_code == 200 + + yield _test_client diff --git a/tests/functional/test_server_launch.py b/tests/functional/test_server_launch.py index a7b65e02..48eca89b 100644 --- a/tests/functional/test_server_launch.py +++ b/tests/functional/test_server_launch.py @@ -26,3 +26,8 @@ def 
test_db_empty(_test_client, _empty_db, _create_user): """Test that the database is empty.""" print("Database empty") print("User created for testing") + + +def test_signin_user(_logged_in_client): + """Signs in user before testing.""" + print("User signed in for testing") diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index 0871f278..da912975 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -4,92 +4,106 @@ import pytest -def test_post_study(_test_client, _login_user): +def test_post_study(_logged_in_client): """ Given a Flask application configured for testing and a study WHEN the '/study' endpoint is requested (POST) THEN check that the response is valid """ # Crate a test using the Flask application configured for testing - response = _test_client.post( + response = _logged_in_client.post( "/study", json={ "title": "Study Title", "image": "https://api.dicebear.com/6.x/adventurer/svg", }, ) - response_data = json.loads(response.data) assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["title"] == "Study Title" assert response_data["image"] == "https://api.dicebear.com/6.x/adventurer/svg" pytest.global_study_id = response_data -def test_get_all_studies(_test_client, _login_user): +def test_get_all_studies(_logged_in_client): """ GIVEN a Flask application configured for testing WHEN the '/study' endpoint is requested (GET) THEN check that the response is valid """ - response = _test_client.get("/study") + response = _logged_in_client.get("/study") + assert response.status_code == 200 response_data = json.loads(response.data) + assert len(response_data) == 1 # Only one study created - assert response.status_code == 200 -def test_update_study(_test_client, _login_user): +def test_update_study(_logged_in_client): """ GIVEN a study ID WHEN the '/study' endpoint is requested (PUT) THEN check that the study is updated with the inputed data """ 
- # study_id = pytest.global_study_id["id"] - # response = _test_client.put( - # f"/study/{study_id}", - # json={ - # "id": pytest.global_study_id["id"], - # "title": "Study Title Updated", - # "image": pytest.global_study_id["image"], - # }, - # ) - # response_data = json.loads(response.data) - - # assert response.status_code == 200 - # assert response_data["title"] == "Study Title Updated" - # assert response_data["image"] == pytest.global_study_id["image"] - # assert response_data["id"] == pytest.global_study_id["id"] - # pytest.global_study_id = response_data - - -def test_get_study_by_id(_test_client, _login_user): + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.put( + f"/study/{study_id}", + json={ + "title": "Study Title Updated", + "image": pytest.global_study_id["image"], # type: ignore + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_study_id = response_data + + assert response_data["title"] == "Study Title Updated" + assert response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert response_data["id"] == pytest.global_study_id["id"] # type: ignore + + +def test_get_study_by_id(_logged_in_client): """ GIVEN a study ID WHEN the '/study/{study_id}' endpoint is requested (GET) THEN check that the response is valid """ - response = _test_client.get(f"/study/{pytest.global_study_id['id']}") # type: ignore # pylint: disable=line-too-long # noqa: E501 + response = _logged_in_client.get(f"/study/{pytest.global_study_id['id']}") # type: ignore # pylint: disable=line-too-long # noqa: E501 # Convert the response data from JSON to a Python dictionary + assert response.status_code == 200 response_data = json.loads(response.data) # Check the response is correct - assert response.status_code == 200 assert response_data["id"] == pytest.global_study_id["id"] # type: ignore assert response_data["title"] == pytest.global_study_id["title"] # type: 
ignore assert response_data["image"] == pytest.global_study_id["image"] # type: ignore -def test_delete_studies_created(_test_client, _login_user): +def test_delete_studies_created(_logged_in_client): """ Given a Flask application configured for testing - WHEN the '/study' endpoint is requested (DELETE) + WHEN the '/study/{study_id}' endpoint is requested (DELETE) THEN check that the response is valid (200) - THEN the '/study' endpoint is requested (GET) - THEN check if the study created has been deleted """ - print("delete study created") - # TODO: DELETE ENDPOINT NOT WORKING - # with flask_app._test_client() as _test_client: - # response = _test_client.post("/study", json={ + # create study first to then delete + response = _logged_in_client.post( + "/study", + json={ + "title": "Delete Me", + "image": "https://api.dicebear.com/6.x/adventurer/svg", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + study_id = response_data["id"] + + # delete study + response = _logged_in_client.delete(f"/study/{study_id}") + + assert response.status_code == 200 diff --git a/tests/functional/test_study_dataset_api.py b/tests/functional/test_study_dataset_api.py index 8d03b7d6..8471496d 100644 --- a/tests/functional/test_study_dataset_api.py +++ b/tests/functional/test_study_dataset_api.py @@ -4,34 +4,163 @@ import pytest -def test_get_all_dataset_from_study(_test_client, _login_user): +def test_get_all_dataset_from_study(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/dataset/{study_id}' endpoint is requested (GET) THEN check that the response is valid and retrieves the dataset content """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/dataset") - response_data = json.loads(response.data) + + response = _logged_in_client.get(f"/study/{study_id}/dataset") + assert response.status_code == 200 - print(response_data) + # response_data 
= json.loads(response.data) + # print(response_data) -def test_post_dataset(_test_client, _login_user): +def test_post_dataset(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/dataset/{study_id}' endpoint is requested (POST) THEN check that the response is valid and creates a dataset """ - # study_id = pytest.global_study_id["id"] - # response = _test_client.post( - # f"/study/{study_id}/dataset", - # json={ - # "id": study_id, - # }, - # ) + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.post( + f"/study/{study_id}/dataset", + json={ + "title": "Dataset Title", + "description": "Dataset Description", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_id = response_data["id"] + + +def test_get_dataset_from_study(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/dataset/{study_id}/{dataset_id}' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id - # assert response.status_code == 200 + response = _logged_in_client.get(f"/study/{study_id}/dataset/{dataset_id}") + + assert response.status_code == 200 # response_data = json.loads(response.data) - # pytest.global_dataset_id = response_data["id"] - # print(pytest.global_dataset_id) + + +def test_delete_dataset_from_study(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/dataset/{study_id}/{dataset_id}' endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + """ + # create a new dataset and delete it afterwards + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.post( + f"/study/{study_id}/dataset", + json={ + "title": "Delete Me", + 
"description": "Dataset Description", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + dataset_id = response_data["id"] + + # delete dataset + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}", + ) + + assert response.status_code == 200 + + +def test_post_dataset_version(_logged_in_client): + """ + Given a Flask application configured for testing, study ID and a dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/version' + endpoint is requested (POST) + Then check that the response is valid and creates a dataset version + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/version", + json={ + "title": "Dataset Version 1.0", + "published": False, + "doi": "doi:test", + "changelog": "changelog testing here", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_version_id = response_data["id"] + + assert response_data["title"] == "Dataset Version 1.0" + assert response_data["published"] is False + assert response_data["doi"] == "doi:test" + assert response_data["changelog"] == "changelog testing here" + + +def test_get_all_dataset_versions(_logged_in_client): + """ + Given a Flask application configured for testing, study ID and a dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/version' endpoint is requested (GET) + Then check that the response is valid and retrieves all dataset versions + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version", + ) + + assert response.status_code == 200 + + +def test_get_dataset_version(_logged_in_client): + """ + Given a Flask application configured for testing, study ID, dataset ID and version ID + When the 
'/study/{study_id}/dataset/{dataset_id}/version/{version_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset version + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + version_id = pytest.global_dataset_version_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + ) + + assert response.status_code == 200 + + +def test_put_dataset_version(_logged_in_client): + """ + Given a Flask application configured for testing, study ID, dataset ID and version ID + When the '/study/{study_id}/dataset/{dataset_id}/version/{version_id}' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset version + """ + # study_id = pytest.global_study_id["id"] + # dataset_id = pytest.global_dataset_id + # version_id = pytest.global_dataset_version_id + + # response = _logged_in_client.put( + # f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + # json={} + # ) + # WIP endpoint currently not implemented diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 0d50d2ae..50bc66b3 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -1 +1,1184 @@ +# pylint: disable=too-many-lines """Tests for the Dataset's Metadata API endpoints""" +import json + +import pytest + + +# ------------------- ACCESS METADATA ------------------- # +def test_get_dataset_access_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset access metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = 
_logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + ) + assert response.status_code == 200 + + +def test_put_dataset_access_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (PUT) + Then check that the response is valid and updates the dataset access metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + json={ + "type": "type", + "description": "description", + "url": "google.com", + "url_last_checked": 123, + }, + ) + + response_data = json.loads(response.data) + assert response.status_code == 200 + + assert response_data["type"] == "type" + assert response_data["description"] == "description" + assert response_data["url"] == "google.com" + assert response_data["url_last_checked"] == 123 + + +# ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # +def test_post_alternative_identifier(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset alternative identifier + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "identifier test", + "type": "ark", + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_alternative_identifier_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "identifier test" + assert response_data[0]["type"] == 
"ark" + + +def test_get_alternative_identifier(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset alternative identifier content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + + assert response.status_code == 200 + + +def test_delete_alternative_identifier(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset alternative identifier content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + identifier_id = pytest.global_alternative_identifier_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + ) + + assert response.status_code == 200 + + +# ------------------- CONSENT METADATA ------------------- # +def test_get_dataset_consent_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset consent metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + ) + + assert response.status_code == 200 + + +def test_put_dataset_consent_metadata(_logged_in_client): + """ + 
Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (PUT) + Then check that the response is valid and updates the dataset consent metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + json={ + "type": "test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "test", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["type"] == "test" + assert response_data["noncommercial"] is True + assert response_data["geog_restrict"] is True + assert response_data["research_type"] is True + assert response_data["genetic_only"] is True + assert response_data["no_methods"] is True + assert response_data["details"] == "test" + + +# ------------------- CONTRIBUTOR METADATA ------------------- # +def test_post_dataset_contributor_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset contributor metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", + json=[ + { + "name": "Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": 
"scheme uri", + } + ], + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_contributor_id = response_data[0]["id"] + + assert response_data[0]["name"] == "Name here" + assert response_data[0]["name_type"] == "Personal" + assert response_data[0]["name_identifier"] == "Name identifier" + assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert response_data[0]["creator"] is False + assert response_data[0]["contributor_type"] == "Con Type" + assert response_data[0]["affiliations"][0]["name"] == "Test" + assert response_data[0]["affiliations"][0]["identifier"] == "yes" + assert response_data[0]["affiliations"][0]["scheme"] == "uh" + assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + +def test_get_dataset_contributor_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset contributor metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" + ) + + assert response.status_code == 200 + + +def test_delete_dataset_contributor_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset contributor metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + contributor_id = pytest.global_dataset_contributor_id + + response = 
_logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" + ) + + assert response.status_code == 200 + + +# ------------------- CREATOR METADATA ------------------- # +def test_get_dataset_creator_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset creator metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" + ) + + assert response.status_code == 200 + + +def test_post_dataset_creator_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (POST) + Then check that the response is valid and creates the dataset creator metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", + json=[ + { + "name": "Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_creator_id = response_data[0]["id"] + + assert response_data[0]["name"] == "Name here" + assert response_data[0]["name_type"] == "Personal" + assert response_data[0]["name_identifier"] == "Name identifier" + assert 
response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert response_data[0]["creator"] is True + assert response_data[0]["affiliations"][0]["name"] == "Test" + assert response_data[0]["affiliations"][0]["identifier"] == "yes" + assert response_data[0]["affiliations"][0]["scheme"] == "uh" + assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + +def test_delete_dataset_creator_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset creator metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + creator_id = pytest.global_dataset_creator_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" + ) + + assert response.status_code == 200 + + +# ------------------- DATE METADATA ------------------- # +def test_get_dataset_date_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset date metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date" + ) + + assert response.status_code == 200 + + +def test_post_dataset_date_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' + endpoint is requested (POST) + Then check that the 
response is valid and creates the dataset date metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date", + json=[{"date": 20210101, "type": "Type", "information": "Info"}], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_date_id = response_data[0]["id"] + + assert response_data[0]["date"] == 20210101 + assert response_data[0]["type"] == "Type" + assert response_data[0]["information"] == "Info" + + +def test_delete_dataset_date_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset date metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + date_id = pytest.global_dataset_date_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + ) + + assert response.status_code == 200 + + +# ------------------- DE-IDENTIFICATION LEVEL METADATA ------------------- # +def test_get_dataset_deidentification_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + de-identification metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + ) + + assert response.status_code == 200 + + +def 
test_put_dataset_deidentification_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + de-identification metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", + json={ + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["type"] == "Level" + assert response_data["direct"] is True + assert response_data["hipaa"] is True + assert response_data["dates"] is True + assert response_data["nonarr"] is True + assert response_data["k_anon"] is True + assert response_data["details"] == "Details" + + +# ------------------- DESCRIPTION METADATA ------------------- # +def test_get_dataset_descriptions_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + ) + + assert response.status_code == 200 + + +def test_post_dataset_description_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the 
'/study/{study_id}/dataset/{dataset_id}/metadata/description' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + description metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description", + json=[{"description": "Description", "type": "Methods"}], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_description_id = response_data[0]["id"] + + assert response_data[0]["description"] == "Description" + assert response_data[0]["type"] == "Methods" + + +def test_delete_dataset_description_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + description metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + description_id = pytest.global_dataset_description_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" + ) + + assert response.status_code == 200 + + +# ------------------- FUNDER METADATA ------------------- # +def test_get_dataset_funder_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + funder metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + 
) + + assert response.status_code == 200 + + +def test_post_dataset_funder_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + funder metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + json=[ + { + "name": "Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_funder_id = response_data[0]["id"] + + assert response_data[0]["name"] == "Name" + assert response_data[0]["award_number"] == "award number" + assert response_data[0]["award_title"] == "Award Title" + assert response_data[0]["award_uri"] == "Award URI" + assert response_data[0]["identifier"] == "Identifier" + assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert response_data[0]["identifier_type"] == "Identifier Type" + + +def test_delete_dataset_funder_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + funder metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + funder_id = pytest.global_dataset_funder_id + + response = _logged_in_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + ) + + assert response.status_code == 200 + + +# ------------------- OTHER METADATA ------------------- # +def test_get_other_dataset_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + other metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + + assert response.status_code == 200 + + +def test_put_other_dataset_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + other metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + "size": ["Size"], + "standards_followed": "Standards Followed", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["acknowledgement"] == "Yes" + assert response_data["language"] == "English" + # assert response_data["resource_type"] == "Resource Type" # CURRENTLY NOT BEING RETURNED + assert response_data["size"] == ["Size"] + assert response_data["standards_followed"] == "Standards Followed" + # ABOVE STATEMENT CURRENTLY NOT BEING UPDATED + + +# ------------------- PUBLICATION METADATA ------------------- # +def test_get_dataset_publisher_metadata(_logged_in_client): + 
""" + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + publisher metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + ) + + assert response.status_code == 200 + + +def test_put_dataset_publisher_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + publisher metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + json={ + "publisher": "Publisher", + "managing_organization_name": "Managing Organization Name", + "managing_organization_ror_id": "Managing Organization ROR ID", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["publisher"] == "Publisher" + assert response_data["managing_organization_name"] == "Managing Organization Name" + assert ( + response_data["managing_organization_ror_id"] == "Managing Organization ROR ID" + ) + + +# ------------------- README METADATA ------------------- # +def test_get_dataset_readme_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + readme metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + 
dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/readme" + ) + + assert response.status_code == 200 + + +def test_put_dataset_readme_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + readme metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/readme", + json={ + "content": "This is the readme content", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["content"] == "This is the readme content" + + +# ------------------- RECORD KEYS METADATA ------------------- # +def test_get_dataset_record_keys_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + record keys metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" + ) + + assert response.status_code == 200 + + +def test_put_dataset_record_keys_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + record keys metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = 
pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", + json={"type": "Record Type", "details": "Details for Record Keys"}, + ) + + assert response.status_code == 201 + response_data = json.loads(response.data) + + assert response_data["key_type"] == "Record Type" + assert response_data["key_details"] == "Details for Record Keys" + + +# ------------------- RELATED ITEM METADATA ------------------- # +def test_get_dataset_related_item_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + related item metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + + assert response.status_code == 200 + + +def test_post_dataset_related_item_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + related item metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + json=[ + { + "contributors": [ + { + "name": "Ndafsdame", + "contributor_type": "Con Type", + "name_type": "Personal", + } + ], + "creators": [{"name": "Name", "name_type": "Personal"}], + "edition": "Edition", + "first_page": "First Page", + "identifiers": [ + { + "identifier": "Identifier", + "metadata_scheme": "Metadata Scheme", + "scheme_type": "Scheme Type", + "scheme_uri": "Scheme URI", + "type": 
"ark", + } + ], + "issue": "Issue", + "last_page": "Last Page", + "number_type": "Number Type", + "number_value": "Number Value", + "publication_year": 2013, + "publisher": "Publisher", + "relation_type": "Relation Type", + "titles": [{"title": "Title", "type": "MainTitle"}], + "type": "Type", + "volume": "Volume", + } + ], + ) + + assert response.status_code == 201 + response_data = json.loads(response.data) + pytest.global_dataset_related_item_id = response_data[0]["id"] + pytest.global_dataset_related_item_contributor_id = response_data[0][ + "contributors" + ][0]["id"] + pytest.global_dataset_related_item_creator_id = response_data[0]["creators"][0][ + "id" + ] + pytest.global_dataset_related_item_identifier_id = response_data[0]["identifiers"][ + 0 + ]["id"] + pytest.global_dataset_related_item_title_id = response_data[0]["titles"][0]["id"] + + assert response_data[0]["contributors"][0]["name"] == "Ndafsdame" + assert response_data[0]["contributors"][0]["contributor_type"] == "Con Type" + assert response_data[0]["contributors"][0]["name_type"] == "Personal" + assert response_data[0]["creators"][0]["name"] == "Name" + assert response_data[0]["creators"][0]["name_type"] == "Personal" + assert response_data[0]["edition"] == "Edition" + assert response_data[0]["first_page"] == "First Page" + assert response_data[0]["identifiers"][0]["identifier"] == "Identifier" + assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" + assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" + assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" + assert response_data[0]["identifiers"][0]["type"] == "ark" + assert response_data[0]["issue"] == "Issue" + assert response_data[0]["last_page"] == "Last Page" + assert response_data[0]["number_type"] == "Number Type" + assert response_data[0]["number_value"] == "Number Value" + assert response_data[0]["publication_year"] == 2013 + assert response_data[0]["publisher"] == 
"Publisher" + assert response_data[0]["relation_type"] == "Relation Type" + assert response_data[0]["titles"][0]["title"] == "Title" + assert response_data[0]["titles"][0]["type"] == "MainTitle" + assert response_data[0]["type"] == "Type" + assert response_data[0]["volume"] == "Volume" + + +def test_delete_dataset_related_item_contributor_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + related item metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + related_item_id = pytest.global_dataset_related_item_id + contributor_id = pytest.global_dataset_related_item_contributor_id + + # pylint: disable=line-too-long + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{contributor_id}" + ) + + assert response.status_code == 200 + + +def test_delete_dataset_related_item_creator_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + related item metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + related_item_id = pytest.global_dataset_related_item_id + creator_id = pytest.global_dataset_related_item_creator_id + + # pylint: disable=line-too-long + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{creator_id}" + ) + + assert response.status_code == 200 + + +def test_delete_dataset_related_item_identifier_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study 
ID and dataset ID + When the '/study/{study_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + related item metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + related_item_id = pytest.global_dataset_related_item_id + identifier_id = pytest.global_dataset_related_item_identifier_id + + # pylint: disable=line-too-long + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{identifier_id}" + ) + + assert response.status_code == 200 + + +def test_delete_dataset_related_item_title_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + related item metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + related_item_id = pytest.global_dataset_related_item_id + title_id = pytest.global_dataset_related_item_title_id + + # pylint: disable=line-too-long + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{title_id}" + ) + + assert response.status_code == 200 + + +def test_delete_dataset_related_item_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + related item metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + related_item_id = pytest.global_dataset_related_item_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}" 
+ ) + + assert response.status_code == 200 + + +# ------------------- RIGHTS METADATA ------------------- # +def test_get_dataset_rights_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + rights metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" + ) + + assert response.status_code == 200 + + +def test_post_dataset_rights_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + rights metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", + json=[ + { + "identifier": "Identifier", + "identifier_scheme": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_rights_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "Identifier" + assert response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert response_data[0]["rights"] == "Rights" + assert response_data[0]["uri"] == "URI" + + +def test_delete_dataset_rights_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/rights' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + rights metadata content + """ + study_id = 
pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + rights_id = pytest.global_dataset_rights_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" + ) + + assert response.status_code == 200 + + +# ------------------- SUBJECTS METADATA ------------------- # +def test_get_dataset_subjects_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + subjects metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject" + ) + + assert response.status_code == 200 + + +def test_post_dataset_subjects_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + subjects metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", + json=[ + { + "classification_code": "Classification Code", + "scheme": "Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Value URI", + } + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + pytest.global_dataset_subject_id = response_data[0]["id"] + + assert response_data[0]["scheme"] == "Scheme" + assert response_data[0]["scheme_uri"] == "Scheme URI" + assert response_data[0]["subject"] == "Subject" + assert 
response_data[0]["value_uri"] == "Value URI" + + +def test_delete_dataset_subject_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + subject metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + subject_id = pytest.global_dataset_subject_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" + ) + + assert response.status_code == 200 + + +# ------------------- TITLE METADATA ------------------- # +def test_get_dataset_title_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + title metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + ) + + assert response.status_code == 200 + + +def test_post_dataset_title_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + title metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title", + json=[{"title": "Title", "type": "Subtitle"}], + ) + + assert response.status_code == 200 + response_data = 
json.loads(response.data) + pytest.global_dataset_title_id = response_data[0]["id"] + + assert response_data[0]["title"] == "Title" + assert response_data[0]["type"] == "Subtitle" + + +def test_delete_dataset_title_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + title metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + title_id = pytest.global_dataset_title_id + + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" + ) + + assert response.status_code == 200 diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index df3000ca..08296ca3 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines """Tests for the Study Metadata API endpoints""" import json @@ -5,14 +6,15 @@ # ------------------- ARM METADATA ------------------- # -def test_post_arm_metadata(_test_client, _login_user): +def test_post_arm_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/arm' endpoint is requested (POST) THEN check that the response is vaild and create a new arm """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/arm", json=[ { @@ -24,9 +26,10 @@ def test_post_arm_metadata(_test_client, _login_user): ], ) + assert response.status_code == 200 response_data = json.loads(response.data) + pytest.global_arm_id = response_data["arms"][0]["id"] - assert response.status_code == 200 assert 
response_data["arms"][0]["label"] == "Label1" assert response_data["arms"][0]["type"] == "Experimental" assert response_data["arms"][0]["description"] == "Arm Description" @@ -34,19 +37,21 @@ def test_post_arm_metadata(_test_client, _login_user): "intervention1", "intervention2", ] - pytest.global_arm_id = response_data["arms"][0]["id"] -def test_get_arm_metadata(_test_client, _login_user): +def test_get_arm_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/arm/metadata' endpoint is requested (GET) THEN check that the response is valid and retrieves the arm metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/arm") - response_data = json.loads(response.data) + + response = _logged_in_client.get(f"/study/{study_id}/metadata/arm") + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["arms"][0]["label"] == "Label1" assert response_data["arms"][0]["type"] == "Experimental" assert response_data["arms"][0]["description"] == "Arm Description" @@ -56,7 +61,7 @@ def test_get_arm_metadata(_test_client, _login_user): ] -def test_delete_arm_metadata(_test_client, _login_user): +def test_delete_arm_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID and arm ID WHEN the '/study/{study_id}/arm/metadata' endpoint is requested (DELETE) @@ -64,19 +69,22 @@ def test_delete_arm_metadata(_test_client, _login_user): """ study_id = pytest.global_study_id["id"] # type: ignore arm_id = pytest.global_arm_id - response = _test_client.delete(f"/study/{study_id}/metadata/arm/{arm_id}") + + response = _logged_in_client.delete(f"/study/{study_id}/metadata/arm/{arm_id}") + assert response.status_code == 200 # ------------------- IPD METADATA ------------------- # -def test_post_available_ipd_metadata(_test_client, _login_user): +def 
test_post_available_ipd_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (POST) THEN check that the response is vaild and new IPD was created """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/available-ipd", json=[ { @@ -98,18 +106,20 @@ def test_post_available_ipd_metadata(_test_client, _login_user): assert response_data[0]["comment"] == "comment1" -def test_get_available_ipd_metadata(_test_client, _login_user): +def test_get_available_ipd_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (GET) THEN check that the response is vaild and retrieves the available IPD(s) """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/available-ipd") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/available-ipd") + assert response.status_code == 200 -def test_delete_available_ipd_metadata(_test_client, _login_user): +def test_delete_available_ipd_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and available IPD ID WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (DELETE) @@ -117,22 +127,24 @@ def test_delete_available_ipd_metadata(_test_client, _login_user): """ study_id = pytest.global_study_id["id"] # type: ignore available_ipd_id = pytest.global_available_ipd_id - response = _test_client.delete( + + response = _logged_in_client.delete( f"/study/{study_id}/metadata/available-ipd/{available_ipd_id}" ) + assert response.status_code == 200 # ------------------- CENTRAL CONTACT METADATA ------------------- # -def test_post_cc_metadata(_test_client, _login_user): +def 
test_post_cc_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/central-contact' endpoint is requested (POST) THEN check that the response is valid and creates the central contact metadata """ - # BUG: ROLE IS RETURNED AS NONE study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/central-contact", json=[ { @@ -149,41 +161,39 @@ def test_post_cc_metadata(_test_client, _login_user): assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_cc_id = response_data[0]["id"] - print("$$$$$$$$$") - print(response_data) - print("$$$$$$$$$") assert response_data[0]["name"] == "central-contact" assert response_data[0]["affiliation"] == "affiliation" - # assert response_data[0]["role"] == "role" + assert response_data[0]["role"] is None assert response_data[0]["phone"] == "808" assert response_data[0]["phone_ext"] == "909" assert response_data[0]["email_address"] == "sample@gmail.com" assert response_data[0]["central_contact"] is True -def test_get_cc_metadata(_test_client, _login_user): +def test_get_cc_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/central-contact' endpoint is requested (GET) THEN check that the response is valid and retrieves the central contact metadata """ - # BUG: ROLE IS RETURNED AS NONE study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/central-contact") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/central-contact") + assert response.status_code == 200 response_data = json.loads(response.data) assert response_data[0]["name"] == "central-contact" assert response_data[0]["affiliation"] == "affiliation" - # assert response_data[0]["role"] == "role" + assert 
response_data[0]["role"] is None assert response_data[0]["phone"] == "808" assert response_data[0]["phone_ext"] == "909" assert response_data[0]["email_address"] == "sample@gmail.com" assert response_data[0]["central_contact"] is True -def test_delete_cc_metadata(_test_client, _login_user): +def test_delete_cc_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and central contact ID @@ -193,36 +203,38 @@ def test_delete_cc_metadata(_test_client, _login_user): """ study_id = pytest.global_study_id["id"] # type: ignore central_contact_id = pytest.global_cc_id - response = _test_client.delete( + + response = _logged_in_client.delete( f"/study/{study_id}/metadata/central-contact/{central_contact_id}" ) + assert response.status_code == 200 # ------------------- COLLABORATORS METADATA ------------------- # -def test_get_collaborators_metadata(_test_client, _login_user): +def test_get_collaborators_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (GET) THEN check that the response is valid and retrieves the collaborators metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/collaborators") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/collaborators") + assert response.status_code == 200 -def test_put_collaborators_metadata(_test_client, _login_user): +def test_put_collaborators_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (POST) THEN check that the response is valid and creates the collaborators metadata """ - # BUG: ENDPOINT STORES KEY RATHER THAN VALUE - # RETURNS ['collaborator_name'] rather than ['collaborator'] - # (so it is storing the key rather than the value) study_id = 
pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/collaborators", json=[ "collaborator1123", @@ -236,28 +248,28 @@ def test_put_collaborators_metadata(_test_client, _login_user): # ------------------- CONDITIONS METADATA ------------------- # -def test_get_conditions_metadata(_test_client, _login_user): +def test_get_conditions_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) THEN check that the response is valid and retrieves the conditions metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/conditions") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/conditions") + assert response.status_code == 200 -def test_put_conditions_metadata(_test_client, _login_user): +def test_put_conditions_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (POST) THEN check that the response is valid and creates the conditions metadata """ - # BUG: ENDPOINT STORES KEY RATHER THAN VALUE - # RESPONSE FOR THIS TEST LOOKS LIKE - # ['conditions', 'keywords', 'oversight_has_dmc', 'size'] study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/conditions", json=[ "true", @@ -269,9 +281,6 @@ def test_put_conditions_metadata(_test_client, _login_user): assert response.status_code == 200 response_data = json.loads(response.data) - print("$$$$$$") - print(response_data) - print("$$$$$$") assert response_data[0] == "true" assert response_data[1] == "conditions string" @@ -280,25 +289,28 @@ def test_put_conditions_metadata(_test_client, _login_user): # ------------------- DESCRIPTION 
METADATA ------------------- # -def test_get_description_metadata(_test_client, _login_user): +def test_get_description_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/description' endpoint is requested (GET) THEN check that the response is valid and retrieves the description metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/description") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/description") + assert response.status_code == 200 -def test_put_description_metadata(_test_client, _login_user): +def test_put_description_metadata(_logged_in_client): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/description' endpoint is requested (POST) THEN check that the response is valid and creates the description metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/description", json={ "brief_summary": "brief_summary", @@ -308,32 +320,34 @@ def test_put_description_metadata(_test_client, _login_user): assert response.status_code == 200 response_data = json.loads(response.data) - # pytest.global_id = response_data["study_id"] assert response_data["brief_summary"] == "brief_summary" assert response_data["detailed_description"] == "detailed_description" # ------------------- DESIGN METADATA ------------------- # -def test_get_design_metadata(_test_client, _login_user): +def test_get_design_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/design' endpoint is requested (GET) THEN check that the response is valid and retrieves the design metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = 
_test_client.get(f"/study/{study_id}/metadata/design") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/design") + assert response.status_code == 200 -def test_put_design_metadata(_test_client, _login_user): +def test_put_design_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/design' endpoint is requested (PUT) THEN check that the response is valid and creates the design metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/design", json={ "design_allocation": "dfasdfasd", @@ -356,6 +370,7 @@ def test_put_design_metadata(_test_client, _login_user): "number_groups_cohorts": 1, }, ) + assert response.status_code == 200 response_data = json.loads(response.data) @@ -383,25 +398,28 @@ def test_put_design_metadata(_test_client, _login_user): # ------------------- ELIGIBILITY METADATA ------------------- # -def test_get_eligibility_metadata(_test_client, _login_user): +def test_get_eligibility_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/eligibility' endpoint is requested (GET) THEN check that the response is valid and retrieves the eligibility metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/eligibility") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/eligibility") + assert response.status_code == 200 -def test_put_eligibility_metadata(_test_client, _login_user): +def test_put_eligibility_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/eligibility' endpoint is requested (PUT) THEN check that the response is valid and updates the eligibility metadata """ study_id = pytest.global_study_id["id"] 
# type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/eligibility", json={ "gender": "All", @@ -437,27 +455,28 @@ def test_put_eligibility_metadata(_test_client, _login_user): # ------------------- IDENTIFICATION METADATA ------------------- # -def test_get_identification_metadata(_test_client, _login_user): +def test_get_identification_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) THEN check that the response is valid and retrieves the identification metadata """ - # BUG: ENDPOINT NOT WORKING study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/identification") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/identification") + assert response.status_code == 200 -def test_post_identification_metadata(_test_client, _login_user): +def test_post_identification_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (POST) THEN check that the response is valid and creates the identification metadata """ - # BUG: ENDPOINT NOT WORKING study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/identification", json={ "primary": { @@ -466,47 +485,70 @@ def test_post_identification_metadata(_test_client, _login_user): "identifier_domain": "domain", "identifier_link": "link", }, - "secondary": [], + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], }, ) + assert response.status_code == 200 - # response_data = json.loads(response.data) - # pytest.global_identification_id = response_data["id"] + 
response_data = json.loads(response.data) + pytest.global_identification_id = response_data["secondary"][0]["id"] + assert response_data["primary"]["identifier"] == "first" + assert response_data["primary"]["identifier_type"] == "test" + assert response_data["primary"]["identifier_domain"] == "domain" + assert response_data["primary"]["identifier_link"] == "link" + assert response_data["secondary"][0]["identifier"] == "test" + assert response_data["secondary"][0]["identifier_type"] == "test" + assert response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" + assert response_data["secondary"][0]["identifier_link"] == "link" -def test_delete_identification_metadata(_test_client, _login_user): + +def test_delete_identification_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) THEN check that the response is valid and retrieves the identification metadata """ - # BUG: ENDPOINT NOT WORKING - # study_id = pytest.global_study_id["id"] # type: ignore - # idenficiation_id = pytest.global_identification_id - # response = _test_client.delete(f"/study/{study_id}/metadata/identification/{identification_id}") # pylint: disable=line-too-long # noqa: E501 - # assert response.status_code == 200 + study_id = pytest.global_study_id["id"] # type: ignore + identification_id = pytest.global_identification_id + + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/identification/{identification_id}" + ) + + assert response.status_code == 200 # ------------------- INTERVENTION METADATA ------------------- # -def test_get_intervention_metadata(_test_client, _login_user): +def test_get_intervention_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (GET) THEN check that the response is valid and retrieves the 
intervention metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/intervention") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/intervention") + assert response.status_code == 200 -def test_post_intervention_metadata(_test_client, _login_user): +def test_post_intervention_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (POST) THEN check that the response is valid and creates the intervention metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/intervention", json=[ { @@ -518,6 +560,7 @@ def test_post_intervention_metadata(_test_client, _login_user): } ], ) + assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_intervention_id = response_data[0]["id"] @@ -530,25 +573,28 @@ def test_post_intervention_metadata(_test_client, _login_user): # ------------------- IPD SHARING METADATA ------------------- # -def test_get_ipdsharing_metadata(_test_client, _login_user): +def test_get_ipdsharing_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/ipdsharing' endpoint is requested (GET) THEN check that the response is valid and retrieves the ipdsharing metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/ipdsharing") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/ipdsharing") + assert response.status_code == 200 -def test_put_ipdsharing_metadata(_test_client, _login_user): +def test_put_ipdsharing_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/ipdsharing' 
endpoint is requested (PUT) THEN check that the response is valid and updates the ipdsharing metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/ipdsharing", json={ "ipd_sharing": "Yes", @@ -575,28 +621,32 @@ def test_put_ipdsharing_metadata(_test_client, _login_user): # ------------------- LINK METADATA ------------------- # -def test_get_link_metadata(_test_client, _login_user): +def test_get_link_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/link' endpoint is requested (GET) THEN check that the response is valid and retrieves the link metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/link") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/link") + assert response.status_code == 200 -def test_post_link_metadata(_test_client, _login_user): +def test_post_link_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/link' endpoint is requested (POST) THEN check that the response is valid and creates the link metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/link", json=[{"url": "google.com", "title": "google link"}], ) + assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_link_id = response_data[0]["id"] @@ -605,7 +655,7 @@ def test_post_link_metadata(_test_client, _login_user): assert response_data[0]["title"] == "google link" -def test_delete_link_metadata(_test_client, _login_user): +def test_delete_link_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and link ID WHEN the 
'/study/{study_id}/metadata/link/{link_id}' endpoint is requested (DELETE) @@ -613,30 +663,35 @@ def test_delete_link_metadata(_test_client, _login_user): """ study_id = pytest.global_study_id["id"] # type: ignore link_id = pytest.global_link_id - response = _test_client.delete(f"/study/{study_id}/metadata/link/{link_id}") + + response = _logged_in_client.delete(f"/study/{study_id}/metadata/link/{link_id}") + assert response.status_code == 200 # ------------------- LOCATION METADATA ------------------- # -def test_get_location_metadata(_test_client, _login_user): +def test_get_location_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/location' endpoint is requested (GET) THEN check that the response is valid and retrieves the location metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/location") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/location") + assert response.status_code == 200 -def test_post_location_metadata(_test_client, _login_user): +def test_post_location_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/location' endpoint is requested (POST) THEN check that the response is valid and creates the location metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/location", json=[ { @@ -649,6 +704,7 @@ def test_post_location_metadata(_test_client, _login_user): } ], ) + assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_location_id = response_data[0]["id"] @@ -661,7 +717,7 @@ def test_post_location_metadata(_test_client, _login_user): assert response_data[0]["country"] == "yes" -def test_delete_location_metadata(_test_client, _login_user): +def 
test_delete_location_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and location ID WHEN the '/study/{study_id}/metadata/location/{location_id}' @@ -670,32 +726,37 @@ def test_delete_location_metadata(_test_client, _login_user): """ study_id = pytest.global_study_id["id"] # type: ignore location_id = pytest.global_location_id - response = _test_client.delete(f"/study/{study_id}/metadata/location/{location_id}") + + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/location/{location_id}" + ) + assert response.status_code == 200 # ------------------- OTHER METADATA ------------------- # -def test_get_other_metadata(_test_client, _login_user): +def test_get_other_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/other' endpoint is requested (GET) THEN check that the response is valid and retrieves the other metadata """ - # BUG: KEYWORDS RETURNS A STRING '[]' INSTEAD OF A LIST - # BUG: CONDITIONS RETURNS A STRING '[]' INSTEAD OF A LIST (CONDITIONS ENDPOINT IS CAUSING WRONG RESPONSE HERE) # pylint: disable=line-too-long # noqa: E501 study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/other") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/other") + assert response.status_code == 200 -def test_put_other_metadata(_test_client, _login_user): +def test_put_other_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/other' endpoint is requested (PUT) THEN check that the response is valid and updates the other metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/other", json={ "oversight_has_dmc": False, @@ -704,6 +765,7 @@ def 
test_put_other_metadata(_test_client, _login_user): "size": 103, }, ) + assert response.status_code == 200 response_data = json.loads(response.data) @@ -714,28 +776,32 @@ def test_put_other_metadata(_test_client, _login_user): # ------------------- OVERALL-OFFICIAL METADATA ------------------- # -def test_get_overall_official_metadata(_test_client, _login_user): +def test_get_overall_official_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/overall-official' endpoint is requested (GET) THEN check that the response is valid and retrieves the overall-official metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/overall-official") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/overall-official") + assert response.status_code == 200 -def test_post_overall_official_metadata(_test_client, _login_user): +def test_post_overall_official_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/overall-official' endpoint is requested (POST) THEN check that the response is valid and creates the overall-official metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/overall-official", json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], ) + assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_overall_official_id = response_data[0]["id"] @@ -745,7 +811,7 @@ def test_post_overall_official_metadata(_test_client, _login_user): assert response_data[0]["role"] == "Study Chair" -def test_delete_overall_official_metadata(_test_client, _login_user): +def test_delete_overall_official_metadata(_logged_in_client): """ Given a Flask application configured for testing and a 
study ID and overall official ID @@ -755,62 +821,69 @@ def test_delete_overall_official_metadata(_test_client, _login_user): """ study_id = pytest.global_study_id["id"] # type: ignore overall_official_id = pytest.global_overall_official_id - response = _test_client.delete( + + response = _logged_in_client.delete( f"/study/{study_id}/metadata/overall-official/{overall_official_id}" ) + assert response.status_code == 200 # ------------------- OVERSIGHT METADATA ------------------- # -def test_get_oversight_metadata(_test_client, _login_user): +def test_get_oversight_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/oversight' endpoint is requested (GET) THEN check that the response is valid and retrieves the oversight metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/oversight") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/oversight") + assert response.status_code == 200 -def test_put_oversight_metadata(_test_client, _login_user): +def test_put_oversight_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/oversight' endpoint is requested (PUT) THEN check that the response is valid and updates the oversight metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": True} ) + assert response.status_code == 200 response_data = json.loads(response.data) - print(response) assert response_data is True # ------------------- REFERENCE METADATA ------------------- # -def test_get_reference_metadata(_test_client, _login_user): +def test_get_reference_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the 
'/study/{study_id}/metadata/reference' endpoint is requested (GET) THEN check that the response is valid and retrieves the reference metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/reference") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/reference") + assert response.status_code == 200 -def test_post_reference_metadata(_test_client, _login_user): +def test_post_reference_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/reference' endpoint is requested (POST) THEN check that the response is valid and creates the reference metadata """ - # BUG:? title key is not being returned in response (update: title isn't in the model) # pylint: disable=line-too-long # noqa: E501 study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.post( + + response = _logged_in_client.post( f"/study/{study_id}/metadata/reference", json=[ { @@ -820,6 +893,7 @@ def test_post_reference_metadata(_test_client, _login_user): } ], ) + assert response.status_code == 200 response_data = json.loads(response.data) pytest.global_reference_id = response_data[0]["id"] @@ -829,7 +903,7 @@ def test_post_reference_metadata(_test_client, _login_user): assert response_data[0]["citation"] == "reference citation" -def test_delete_reference_metadata(_test_client, _login_user): +def test_delete_reference_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and reference ID @@ -839,32 +913,37 @@ def test_delete_reference_metadata(_test_client, _login_user): """ study_id = pytest.global_study_id["id"] # type: ignore reference_id = pytest.global_reference_id - response = _test_client.delete( + + response = _logged_in_client.delete( f"/study/{study_id}/metadata/reference/{reference_id}" ) + assert response.status_code == 200 # ------------------- SPONSORS METADATA 
------------------- # -def test_get_sponsors_metadata(_test_client, _login_user): +def test_get_sponsors_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (GET) THEN check that the response is valid and retrieves the sponsors metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/sponsors") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/sponsors") + assert response.status_code == 200 -def test_put_sponsors_metadata(_test_client, _login_user): +def test_put_sponsors_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (PUT) THEN check that the response is valid and updates the sponsors metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/sponsors", json={ "responsible_party_type": "Sponsor", @@ -874,6 +953,7 @@ def test_put_sponsors_metadata(_test_client, _login_user): "lead_sponsor_name": "sponsor name", }, ) + assert response.status_code == 200 response_data = json.loads(response.data) @@ -888,25 +968,28 @@ def test_put_sponsors_metadata(_test_client, _login_user): # ------------------- STATUS METADATA ------------------- # -def test_get_status_metadata(_test_client, _login_user): +def test_get_status_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/status' endpoint is requested (GET) THEN check that the response is valid and retrieves the status metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.get(f"/study/{study_id}/metadata/status") + + response = _logged_in_client.get(f"/study/{study_id}/metadata/status") + 
assert response.status_code == 200 -def test_put_status_metadata(_test_client, _login_user): +def test_put_status_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/status' endpoint is requested (PUT) THEN check that the response is valid and updates the status metadata """ study_id = pytest.global_study_id["id"] # type: ignore - response = _test_client.put( + + response = _logged_in_client.put( f"/study/{study_id}/metadata/status", json={ "overall_status": "Withdrawn", @@ -917,6 +1000,7 @@ def test_put_status_metadata(_test_client, _login_user): "completion_date_type": "Actual", }, ) + assert response.status_code == 200 response_data = json.loads(response.data) From 3236235ec451ec4a1dbc0f513a5bd18daa47c56a Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Fri, 10 Nov 2023 13:59:19 -0800 Subject: [PATCH 352/505] merge: version into staging (#21) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add study metadata GET endpoint * fix: study design schema * feat: GET version study metadata * style: format * fix: dataset overview * fix: study metadata overview * style: 🎨 fix code style issues with Black * fix: format * style: pylint and flake errors * feat: changelog and readme endpoints * fix: dataset dataset version GET * fix: study version GET * fix: typo * style: 🎨 fix code style issues with Black * fix: schema * fix: version request * style: 🎨 fix code style issues with Black * style: format * fix: dataset version endpoints * fix: related items version * fix: delete dataset readme * feat: alembic file for deleted dataset readme table * style: 🎨 fix code style issues with Black * feat Contributor type return for minimised version metadata * style: 🎨 fix code style issues with Black * fix: alembix version * 👷 ci: add db migrations to dockerfile * fix: dataset title name became as uppercase * style: 🎨 
fix code style issues with Black * style: format * fix: main title * fix: testing errors * style: format * fix: remove comment * fix: remove comment --------- Co-authored-by: Lint Action Co-authored-by: Sanjay Soundarajan --- Dockerfile | 3 + ...2ac2b020c7c_delete_dataset_readme_table.py | 22 +++ apis/__init__.py | 2 - apis/dataset.py | 146 ++++++++++++++---- apis/dataset_metadata/dataset_readme.py | 33 ---- apis/dataset_metadata/dataset_related_item.py | 1 - apis/dataset_metadata/dataset_title.py | 10 +- apis/study_metadata/study_design.py | 19 +-- apis/study_metadata/study_other.py | 3 +- model/__init__.py | 2 - model/dataset.py | 57 ++++++- model/dataset_metadata/dataset_access.py | 6 + .../dataset_alternate_identifier.py | 7 + model/dataset_metadata/dataset_consent.py | 7 + model/dataset_metadata/dataset_contributor.py | 9 ++ model/dataset_metadata/dataset_date.py | 10 ++ .../dataset_de_ident_level.py | 6 + model/dataset_metadata/dataset_description.py | 7 + model/dataset_metadata/dataset_funder.py | 7 + model/dataset_metadata/dataset_other.py | 14 ++ model/dataset_metadata/dataset_readme.py | 31 ---- model/dataset_metadata/dataset_record_keys.py | 6 + .../dataset_metadata/dataset_related_item.py | 34 +++- .../dataset_related_item_contributor.py | 8 + .../dataset_related_item_identifier.py | 7 + .../dataset_related_item_title.py | 7 + model/dataset_metadata/dataset_rights.py | 7 + model/dataset_metadata/dataset_subject.py | 7 + model/dataset_metadata/dataset_title.py | 7 + model/study.py | 47 ++++++ model/study_metadata/study_arm.py | 8 + model/study_metadata/study_available_ipd.py | 4 + model/study_metadata/study_contact.py | 10 ++ model/study_metadata/study_description.py | 4 + model/study_metadata/study_eligibility.py | 11 ++ model/study_metadata/study_identification.py | 8 + model/study_metadata/study_intervention.py | 8 + model/study_metadata/study_ipdsharing.py | 7 + model/study_metadata/study_link.py | 8 + model/study_metadata/study_location.py | 9 ++ 
model/study_metadata/study_other.py | 7 + .../study_metadata/study_overall_official.py | 8 + model/study_metadata/study_reference.py | 8 + .../study_sponsors_collaborators.py | 8 + model/study_metadata/study_status.py | 7 + model/version.py | 2 + model/version_readme.py | 16 +- poetry.lock | 111 ++++++------- pyproject.toml | 1 + sql/init.sql | 12 -- sql/init_timezones.sql | 10 -- .../test_study_dataset_metadata_api.py | 91 +++-------- 52 files changed, 603 insertions(+), 287 deletions(-) create mode 100644 alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py delete mode 100644 apis/dataset_metadata/dataset_readme.py delete mode 100644 model/dataset_metadata/dataset_readme.py diff --git a/Dockerfile b/Dockerfile index 24060626..7be75a98 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,4 +22,7 @@ COPY core ./core COPY app.py . COPY config.py . +# run database migrations +RUN alembic upgrade head + CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file diff --git a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py new file mode 100644 index 00000000..f2ea30f3 --- /dev/null +++ b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py @@ -0,0 +1,22 @@ +"""delete dataset readme table + +Revision ID: 72ac2b020c7c +Revises: +Create Date: 2023-11-08 15:47:00.205940 + +""" +from typing import Sequence, Union + +import alembic +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "72ac2b020c7c" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + alembic.op.drop_table("dataset_readme") diff --git a/apis/__init__.py b/apis/__init__.py index bfb5d0c0..bf2e0873 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -16,7 +16,6 @@ from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_funder import api as funder from .dataset_metadata.dataset_other import api as dataset_other -from .dataset_metadata.dataset_readme import api as readme from .dataset_metadata.dataset_record_keys import api as record_keys from .dataset_metadata.dataset_related_item import api as related_item from .dataset_metadata.dataset_rights import api as rights @@ -63,7 +62,6 @@ "description", "funder", "dataset_other", - "readme", "record_keys", "related_item", "api", diff --git a/apis/dataset.py b/apis/dataset.py index dc609a75..ff933fda 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,6 +1,6 @@ import typing -from flask import Response, jsonify, request +from flask import jsonify, request from flask_restx import Namespace, Resource, fields import model @@ -32,6 +32,8 @@ "created_at": fields.String(required=True), "dataset_versions": fields.Nested(dataset_versions_model, required=True), "latest_version": fields.String(required=True), + "title": fields.String(required=True), + "description": fields.String(required=True), }, ) @@ -41,6 +43,7 @@ class DatasetList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset) + @api.doc("view datasets") def get(self, study_id): study = model.Study.query.get(study_id) datasets = model.Dataset.query.filter_by(study=study) @@ -48,13 +51,12 @@ def get(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("update dataset") + @api.doc("add datasets") 
@api.expect(dataset) def post(self, study_id): study = model.Study.query.get(study_id) if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 - # todo if study.participant id== different study Throw error data: typing.Union[typing.Any, dict] = request.json dataset_ = model.Dataset.from_data(study) model.db.session.add(dataset_) @@ -77,6 +79,7 @@ def post(self, study_id): @api.route("/study//dataset/") @api.response(201, "Success") @api.response(400, "Validation Error") +@api.doc("view dataset") class DatasetResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @@ -86,6 +89,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(201, "Success") @api.response(400, "Validation Error") + @api.doc("update dataset") def put(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) @@ -102,26 +106,20 @@ def put(self, study_id: int, dataset_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.doc("delete dataset") def delete(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 + data_obj = model.Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: model.db.session.delete(version) + model.db.session.delete(data_obj) model.db.session.commit() return 204 - # def delete(self, study_id: int, dataset_id: int, version_id: int): - # data_obj = Dataset.query.get(dataset_id) - # for version in data_obj.dataset_versions: - # db.session.delete(version) - # db.session.commit() - # db.session.delete(data_obj) - # db.session.commit() - # return Response(status=204) - @api.route("/study//dataset//version/") class VersionResource(Resource): @@ -131,9 +129,15 @@ class VersionResource(Resource): def get( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument 
+ study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict() + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("update dataset version") def put( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument @@ -145,26 +149,86 @@ def put( model.db.session.commit() return jsonify(data_version_obj.to_dict()), 201 + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("delete dataset version") def delete( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 - data_obj = model.Dataset.query.get(dataset_id) - for version in data_obj.dataset_versions: - model.db.session.delete(version) - model.db.session.commit() - model.db.session.delete(data_obj) + version_obj = model.Version.query.get(version_id) + model.db.session.delete(version_obj) + model.db.session.commit() + return 204 + + +@api.route("/study//dataset//version//changelog") +class VersionDatasetChangelog(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version changelog") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return {"changelog": version.changelog} + + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version changelog update") + def put( + self, study_id: str, dataset_id: str, version_id: str + ): # pylint: disable= unused-argument + study = 
model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + data: typing.Union[typing.Any, dict] = request.json + version_ = model.Version.query.get(version_id) + version_.changelog = data["changelog"] + model.db.session.commit() + return 201 + + +@api.route("/study//dataset//version//readme") +class VersionDatasetReadme(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version readme") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return version.version_readme.to_dict(), 200 + + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version readme update") + def put( + self, study_id: str, dataset_id: str, version_id: str + ): # pylint: disable= unused-argument + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + data = request.json + version_ = model.Version.query.get(version_id) + version_.version_readme.update(data) model.db.session.commit() - return Response(status=204) + return 201 @api.route("/study//dataset//version") class VersionList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("versions") + @api.doc("view versions") def get(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("version", study): @@ -202,21 +266,37 @@ def post(self, study_id: int, dataset_id: int): # return "Access denied, you can not modify", 403 # data_obj = model.Version.query.get(version_id) # data: typing.Union[typing.Any, dict] = request.json -# dataset_versions = model.Version.from_data(data_obj, data) # model.db.session.commit() # 
return dataset_versions.to_dict() -# -# @api.route("/study//dataset/ -# /version//dataset-metadata") -# class VersionStudyMetadataResource(Resource): -# @api.response(201, "Success") -# @api.response(400, "Validation Error") -# @api.doc("version dataset metadata get") -# def get(self, study_id: int, dataset_id: int, version_id): -# study = model.Study.query.get(study_id) -# if not is_granted("dataset", study): -# return "Access denied, you can not modify", 403 -# version = dataset.dataset_version.get(version_id) -# pass +@api.route("/study//dataset//version//study-metadata") +class VersionDatasetMetadataResource(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version study metadata get") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return version.dataset.study.to_dict_study_metadata() + + +@api.route( + "/study//dataset//version//dataset-metadata" +) +class VersionStudyMetadataResource(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version dataset metadata get") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return version.dataset.to_dict_dataset_metadata() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py deleted file mode 100644 index edd1e0ad..00000000 --- a/apis/dataset_metadata/dataset_readme.py +++ /dev/null @@ -1,33 +0,0 @@ -from flask import request -from flask_restx import Resource, fields - -import model -from 
apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_readme = api.model( - "DatasetReadme", - {"id": fields.String(required=True), "content": fields.String(required=True)}, -) - - -@api.route("/study//dataset//metadata/readme") -class DatasetReadmeResource(Resource): - @api.doc("readme") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_readme) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - dataset_ = model.Dataset.query.get(dataset_id) - dataset_readme_ = dataset_.dataset_readme - return dataset_readme_.to_dict() - - def put(self, study_id: int, dataset_id: int): - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_readme.update(data) - model.db.session.commit() - return dataset_.dataset_readme.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index df5bca5b..da3defdd 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -160,7 +160,6 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", - "MainTitle", ], }, }, diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index b3d245c8..23793e7e 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -63,7 +63,6 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", - "MainTitle", ], }, }, @@ -83,17 +82,12 @@ def post(self, study_id: int, dataset_id: int): for i in data: if "id" in i and i["id"]: dataset_title_ = model.DatasetTitle.query.get(i["id"]) - # if dataset_title_.type == 
"MainTitle": - # return ( - # "Main Title type can not be modified", - # 403, - # dataset_title_.update(i) list_of_elements.append(dataset_title_.to_dict()) elif "id" not in i or not i["id"]: if i["type"] == "MainTitle": return ( - "MainTitle type can not be given", + "Main Title type can not be given", 403, ) dataset_title_ = model.DatasetTitle.from_data(data_obj, i) @@ -125,7 +119,7 @@ def delete( dataset_title_ = model.DatasetTitle.query.get(title_id) if dataset_title_.type == "MainTitle": return ( - "MainTitle type can not be deleted", + "Main Title type can not be deleted", 403, ) model.db.session.delete(dataset_title_) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 0b8fd32c..ebadf2e6 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -13,7 +13,6 @@ study_design = api.model( "StudyDesign", { - "id": fields.String(required=True), "design_allocation": fields.String(required=True), "study_type": fields.String(required=True), "design_intervention_model": fields.String(required=True), @@ -74,7 +73,7 @@ def put(self, study_id: int): "type": ["string", "null"], }, "design_who_masked_list": { - "type": "array", + "type": ["array", "null"], "items": { "type": "string", "oneOf": [ @@ -91,7 +90,7 @@ def put(self, study_id: int): "uniqueItems": True, }, "phase_list": { - "type": "array", + "type": ["array", "null"], "items": { "type": "string", "oneOf": [ @@ -111,9 +110,9 @@ def put(self, study_id: int): }, "uniqueItems": True, }, - "enrollment_count": {"type": "integer"}, + "enrollment_count": {"type": ["integer", "null"]}, "enrollment_type": { - "type": "string", + "type": ["string", "null"], "enum": ["Actual", "Anticipated"], }, "number_arms": {"type": ["integer", "null"]}, @@ -128,7 +127,7 @@ def put(self, study_id: int): "Case-Control", "Case-Only", "Case-Crossover", - "Ecologic or Community Study", + "Ecologic or Community", "Family-Based", "Other", ] @@ -155,12 +154,8 @@ def 
put(self, study_id: int): "uniqueItems": True, }, "bio_spec_retention": {"type": ["string", "null"]}, - "bio_spec_description": { - "type": ["string", "null"], - }, - "target_duration": { - "type": ["string", "null"], - }, + "bio_spec_description": {"type": ["string", "null"]}, + "target_duration": {"type": ["string", "null"]}, "number_groups_cohorts": {"type": ["integer", "null"]}, }, } diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 9030641c..fd04de4a 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -91,8 +91,7 @@ def get(self, study_id: int): study_ = model.Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc - - return study_oversight_has_dmc + return {"oversight": study_oversight_has_dmc} def put(self, study_id: int): """Update study oversight metadata""" diff --git a/model/__init__.py b/model/__init__.py index 5b91a29a..77f8ffeb 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -18,7 +18,6 @@ from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder from .dataset_metadata.dataset_other import DatasetOther -from .dataset_metadata.dataset_readme import DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights from .dataset_metadata.dataset_subject import DatasetSubject @@ -71,7 +70,6 @@ "DatasetFunder", "DatasetAlternateIdentifier", "DatasetRights", - "DatasetReadme", "DatasetRecordKeys", "DatasetTitle", "DatasetSubject", diff --git a/model/dataset.py b/model/dataset.py index 98cdc078..96bc4d41 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -20,7 +20,6 @@ def __init__(self, study): self.dataset_record_keys = model.DatasetRecordKeys(self) self.dataset_de_ident_level = model.DatasetDeIdentLevel(self) self.dataset_consent = model.DatasetConsent(self) - self.dataset_readme = 
model.DatasetReadme(self) self.dataset_other = model.DatasetOther(self) self.dataset_title.append(model.DatasetTitle(self)) @@ -89,9 +88,6 @@ def __init__(self, study): dataset_other = db.relationship( "DatasetOther", back_populates="dataset", uselist=False, cascade="all, delete" ) - dataset_readme = db.relationship( - "DatasetReadme", back_populates="dataset", uselist=False, cascade="all, delete" - ) dataset_record_keys = db.relationship( "DatasetRecordKeys", back_populates="dataset", @@ -113,14 +109,61 @@ def __init__(self, study): def to_dict(self): last_published = self.last_published() - # last_modified = self.last_modified() - return { "id": self.id, "created_at": self.created_at, # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published else None, - # "title": self.dataset_title.title if self.dataset_title else "" + "title": [ + i.title if i.title else None for i in self.dataset_title # type: ignore + ][0], + "description": [ + i.description if i.type == "Abstract" else None + for i in self.dataset_description # type: ignore + ][0], + } + + def to_dict_dataset_metadata(self): + return { + "contributors": [ + i.to_dict_metadata() + for i in self.dataset_contributors # type: ignore + if not i.creator + ], + "about": self.dataset_other.to_dict_metadata(), + "publisher": self.dataset_other.to_dict_publisher(), # type: ignore + "access": self.dataset_access.to_dict_metadata(), + "consent": self.dataset_consent.to_dict_metadata(), + "dates": [i.to_dict_metadata() for i in self.dataset_date], # type: ignore + "de_identification": self.dataset_de_ident_level.to_dict_metadata(), + "descriptions": [ + i.to_dict_metadata() for i in self.dataset_description # type: ignore + ], + "funders": [ + i.to_dict_metadata() for i in self.dataset_funder # type: ignore + ], + "identifiers": [ + i.to_dict_metadata() + for i in self.dataset_alternate_identifier # type: ignore + ], + "creators": [ + i.to_dict_metadata() + 
for i in self.dataset_contributors # type: ignore + if i.creator + ], + "record_keys": self.dataset_record_keys.to_dict_metadata(), + "related_items": [ + i.to_dict_metadata() for i in self.dataset_related_item # type: ignore + ], + "rights": [ + i.to_dict_metadata() for i in self.dataset_rights # type: ignore + ], + "subjects": [ + i.to_dict_metadata() for i in self.dataset_subject # type: ignore + ], + "titles": [ + i.to_dict_metadata() for i in self.dataset_title # type: ignore + ], } def last_published(self): diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 577a5e14..7ef72ffb 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -30,6 +30,12 @@ def to_dict(self): "url": self.url, } + def to_dict_metadata(self): + return { + "type": self.type, + "description": self.description, + } + @staticmethod def from_data(dataset: Dataset, data: dict): dataset_access = DatasetAccess(dataset) diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index 1b2ebb70..7eba2e98 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -28,6 +28,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "type": self.type, + "identifier": self.identifier, + } + @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetAlternateIdentifier(dataset) diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 2258ea5d..e7ea1cd3 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -38,6 +38,13 @@ def to_dict(self): "details": self.details, } + def to_dict_metadata(self): + return { + "noncommercial": self.noncommercial, + "geog_restrict": self.geog_restrict, + "research_type": 
self.research_type, + } + @staticmethod def from_data(dataset, data: dict): dataset_consent = DatasetConsent(dataset) diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py index 6d4f54d7..2e14d02a 100644 --- a/model/dataset_metadata/dataset_contributor.py +++ b/model/dataset_metadata/dataset_contributor.py @@ -40,6 +40,15 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "name": self.name, + "name_type": self.name_identifier, + "contributor_type": self.contributor_type, + "creator": self.creator, + } + @staticmethod def from_data(dataset, data: dict): dataset_contributor = DatasetContributor(dataset) diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index c58a1741..f1da513c 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -31,6 +31,16 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + bigint_timestamp = self.date + unix_timestamp = bigint_timestamp / 1000 + datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) + return { + "id": self.id, + "date": datetime_obj.strftime("%m-%d-%Y"), + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetDate(dataset) diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index 4d8e1cc1..b5acccc4 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -38,6 +38,12 @@ def to_dict(self): "details": self.details, } + def to_dict_metadata(self): + return { + "direct": self.direct, + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_de_ident_level = DatasetDeIdentLevel(dataset) diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index 
97f3a8a2..6660e1f9 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -30,6 +30,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "description": self.description, + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_description = DatasetDescription(dataset) diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index 90c45551..061e7d31 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -38,6 +38,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "name": self.name, + "identifier": self.identifier, + } + @staticmethod def from_data(dataset, data: dict): dataset_funder = DatasetFunder(dataset) diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 5a96918f..0225972b 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -44,6 +44,20 @@ def to_dict(self): "resource_type": self.resource_type, } + def to_dict_metadata(self): + return { + "language": self.language, + "size": self.size, + "resource_type": self.resource_type, + } + + def to_dict_publisher(self): + return { + "managing_organization_name": self.managing_organization_name, + "managing_organization_ror_id": self.managing_organization_ror_id, + "publisher": self.publisher, + } + @staticmethod def from_data(dataset, data: dict): dataset_other = DatasetOther(dataset) diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py deleted file mode 100644 index 7fa75b3a..00000000 --- a/model/dataset_metadata/dataset_readme.py +++ /dev/null @@ -1,31 +0,0 @@ -from ..db import db - - -class DatasetReadme(db.Model): # type: ignore - def __init__(self, dataset): - self.dataset 
= dataset - self.content = "" - - __tablename__ = "dataset_readme" - content = db.Column(db.String, nullable=False) - - dataset_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False - ) - dataset = db.relationship("Dataset", back_populates="dataset_readme") - - def to_dict(self): - return { - "id": self.dataset_id, - "content": self.content, - } - - @staticmethod - def from_data(dataset, data: dict): - dataset_readme = DatasetReadme(dataset) - dataset_readme.update(data) - return dataset_readme - - def update(self, data: dict): - self.content = data["content"] - self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 993af3f2..9f2d9b94 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -17,6 +17,12 @@ def __init__(self, dataset): dataset = db.relationship("Dataset", back_populates="dataset_record_keys") def to_dict(self): + return { + "type": self.key_type, + "details": self.key_details, + } + + def to_dict_metadata(self): return { "key_type": self.key_type, "key_details": self.key_details, diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index 579bed63..f95db200 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -52,7 +52,6 @@ def to_dict(self): key=lambda creator: creator.created_at, ) creators = [c for c in sorted_contributors if c.creator] - contributors = [c for c in sorted_contributors if not c.creator] return { "id": self.id, @@ -97,6 +96,39 @@ def to_dict(self): ], } + def to_dict_metadata(self): + bigint_timestamp = self.dataset_related_item_other.publication_year + pub_year = "" + if bigint_timestamp: + unix_timestamp = bigint_timestamp / 1000 + datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) + pub_year = 
datetime_obj.strftime("%Y") + sorted_contributors = sorted( + self.dataset_related_item_contributor, + key=lambda creator: creator.created_at, + ) + + creators = [c for c in sorted_contributors if c.creator] + contributors = [c for c in sorted_contributors if not c.creator] + return { + "type": self.type, + "titles": [ + i.to_dict_metadata() + for i in self.dataset_related_item_title # type: ignore + ], + "identifiers": [ + i.to_dict_metadata() + for i in self.dataset_related_item_identifier # type: ignore + ], + "creators": [i.to_dict_metadata() for i in creators], # type: ignore + "contributors": [ + i.to_dict_metadata() for i in contributors # type: ignore + ], + # "publication_year": self.dataset_related_item_other.publication_year, + "publication_year": pub_year if bigint_timestamp else None, + "publisher": self.dataset_related_item_other.publisher, + } + @staticmethod def from_data(dataset, data: dict): dataset_related_item = DatasetRelatedItem(dataset) diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index b38b3651..480757cb 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -36,6 +36,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "name": self.name, + "name_type": self.name_type, + "contributor_type": self.contributor_type, + } + @staticmethod def from_data(dataset_related_item, data: dict, creator): contributor_ = DatasetRelatedItemContributor(dataset_related_item, creator) diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 48b2e548..63d95f4b 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -38,6 +38,13 @@ def to_dict(self): "created_at": 
self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "identifier": self.identifier, + "type": self.type, + } + @staticmethod def from_data(dataset_related_item, data: dict): identifier_ = DatasetRelatedItemIdentifier(dataset_related_item) diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 2d4ea08d..fab997e5 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -32,6 +32,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "title": self.title, + "type": self.type, + } + @staticmethod def from_data(dataset_related_item, data: dict): dataset_related_item_title = DatasetRelatedItemTitle(dataset_related_item) diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 27ad1a63..77103cde 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -33,6 +33,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "rights": self.rights, + "identifier": self.identifier, + } + @staticmethod def from_data(dataset, data: dict): dataset_rights = DatasetRights(dataset) diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 43ea560e..0e6d5792 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -35,6 +35,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "subject": self.subject, + "scheme": self.scheme, + } + @staticmethod def from_data(dataset, data: dict): dataset_subject = DatasetSubject(dataset) diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index eff54c9f..f8426471 100644 --- 
a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -30,6 +30,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "title": self.title, + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_title = DatasetTitle(dataset) diff --git a/model/study.py b/model/study.py index a919bbae..667b6f6f 100644 --- a/model/study.py +++ b/model/study.py @@ -172,6 +172,53 @@ def to_dict(self): "role": contributor_permission.to_dict()["role"], } + def to_dict_study_metadata(self): + # self.study_contact: Iterable = [] + primary = [ + i.to_dict_metadata() + for i in self.study_identification # type: ignore + if not i.secondary + ] + + return { + "arms": [i.to_dict_metadata() for i in self.study_arm], # type: ignore + "available_ipd": [ + i.to_dict_metadata() for i in self.study_available_ipd # type: ignore + ], + "contacts": [ + i.to_dict_metadata() for i in self.study_contact # type: ignore + ], + "description": self.study_description.to_dict_metadata(), + "design": self.study_design.to_dict(), + "eligibility": self.study_eligibility.to_dict_metadata(), + "primary_identifier": primary[0] if len(primary) else None, + "secondary_identifiers": [ + i.to_dict_metadata() + for i in self.study_identification # type: ignore + if i.secondary + ], + "interventions": [ + i.to_dict_metadata() for i in self.study_intervention # type: ignore + ], + "ipd_sharing": self.study_ipdsharing.to_dict_metadata(), + "links": [i.to_dict_metadata() for i in self.study_link], # type: ignore + "locations": [ + i.to_dict_metadata() for i in self.study_location # type: ignore + ], + "overall_officials": [ + i.to_dict_metadata() + for i in self.study_overall_official # type: ignore + ], + "references": [ + i.to_dict_metadata() for i in self.study_reference # type: ignore + ], + "sponsors": self.study_sponsors_collaborators.to_dict_metadata(), + "collaborators": 
self.study_sponsors_collaborators.collaborator_name, + "status": self.study_status.to_dict_metadata(), + "oversight": self.study_other.oversight_has_dmc, + "conditions": self.study_other.conditions, + } + @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 01a9eed6..db4609cf 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -43,6 +43,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "label": self.label, + "description": self.description, + } + @staticmethod def from_data(study: model.Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index cd65d626..5ae9913c 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -40,6 +40,10 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return {"identifier": self.identifier, "url": self.url} + @staticmethod def from_data(study: model.StudyArm, data: dict): """Creates a new study metadata from a dictionary""" diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 4f943e6f..d2c5c5d3 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -48,6 +48,16 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "name": self.name, + "affiliation": self.affiliation, + "phone": self.phone, + "email_address": self.email_address, + } + @staticmethod def from_data(study: Study, data: dict, role, central_contact): 
"""Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index be3289da..f5115882 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -32,6 +32,10 @@ def to_dict(self): "detailed_description": self.detailed_description, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return {"brief_summary": self.brief_summary} + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 752a69f4..81019b09 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -67,6 +67,17 @@ def to_dict(self): else None, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "gender": self.gender, + "gender_based": self.gender_based, + "minimum_age_value": self.minimum_age_value, + "maximum_age_value": self.maximum_age_value, + "inclusion_criteria": self.inclusion_criteria, + "exclusion_criteria": self.exclusion_criteria, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index f6373bf1..299f20bd 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -41,6 +41,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "identifier": self.identifier, + "identifier_type": self.identifier_type, + "id": self.id, + } + @staticmethod def from_data(study: Study, data: dict, secondary): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_intervention.py 
b/model/study_metadata/study_intervention.py index 0230b832..bb946cd2 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -45,6 +45,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "type": self.type, + "name": self.name, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 9d7868bc..9152959c 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -46,6 +46,13 @@ def to_dict(self): "ipd_sharing_url": self.ipd_sharing_url, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "ipd_sharing": self.ipd_sharing, + "ipd_sharing_info_type_list": self.ipd_sharing_info_type_list, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 946704d2..aa3ba44e 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -36,6 +36,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "title": self.title, + "url": self.url, + "id": self.id, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 06e640d6..27ac6476 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -44,6 +44,15 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" 
+ return { + "id": self.id, + "facility": self.facility, + "city": self.city, + "country": self.country, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index f66758e9..237d5b98 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -41,6 +41,13 @@ def to_dict(self): "size": self.size, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "oversight_has_dmc": self.oversight_has_dmc, + "conditions": self.conditions, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index e173cd0b..069f3099 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -38,6 +38,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "name": self.name, + "role": self.role, + "affiliation": self.affiliation, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 113af7b0..cce05886 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -38,6 +38,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "identifier": self.identifier, + "citation": self.citation, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_sponsors_collaborators.py 
b/model/study_metadata/study_sponsors_collaborators.py index a8ab84f5..9f916611 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -45,6 +45,14 @@ def to_dict(self): "lead_sponsor_name": self.lead_sponsor_name, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "responsible_party_type": self.responsible_party_type, + "responsible_party_investigator_name": self.responsible_party_investigator_name, + "lead_sponsor_name": self.lead_sponsor_name, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 24920e32..dd1ffe78 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -43,6 +43,13 @@ def to_dict(self): "completion_date_type": self.completion_date_type, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "overall_status": self.overall_status, + "start_date": self.start_date, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/version.py b/model/version.py index 9cff0264..3aa3bb5c 100644 --- a/model/version.py +++ b/model/version.py @@ -4,6 +4,7 @@ from sqlalchemy import Table +import model from model.dataset import Dataset from .db import db @@ -21,6 +22,7 @@ def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.version_readme = model.VersionReadme(self) __tablename__ = "version" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/version_readme.py b/model/version_readme.py index ec657f54..c04bd1b5 100644 --- a/model/version_readme.py +++ b/model/version_readme.py @@ -2,6 +2,10 @@ class VersionReadme(db.Model): # type: ignore + def 
__init__(self, version): + self.version = version + self.content = "" + __tablename__ = "version_readme" content = db.Column(db.String, nullable=True) @@ -12,14 +16,14 @@ class VersionReadme(db.Model): # type: ignore def to_dict(self): return { - "content": self.content, + "readme": self.content, } @staticmethod - def from_data(data: dict): - user = VersionReadme() - user.update(data) - return user + def from_data(version, data: dict): + readme = VersionReadme(version) + readme.update(data) + return readme def update(self, data: dict): - self.content = data["content"] + self.content = data["readme"] diff --git a/poetry.lock b/poetry.lock index e322ea82..b9e4bb35 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,26 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +[[package]] +name = "alembic" +version = "1.12.1" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, + {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["python-dateutil"] + [[package]] name = "aniso8601" version = "9.0.1" @@ -658,51 +679,6 @@ docopt = ">=0.6" minilog = ">=2.0" requests = ">=2.28,<3.0" -[[package]] -name = "cryptography" -version = "41.0.5" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", 
"pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - [[package]] name = "debugpy" version = "1.6.7" @@ -1673,19 +1649,6 @@ files = [ {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, ] -[[package]] -name = "jwt" -version = "1.3.1" -description = "JSON Web Token library for Python 3." -optional = false -python-versions = ">= 3.6" -files = [ - {file = "jwt-1.3.1-py3-none-any.whl", hash = "sha256:61c9170f92e736b530655e75374681d4fcca9cfa8763ab42be57353b2b203494"}, -] - -[package.dependencies] -cryptography = ">=3.1,<3.4.0 || >3.4.0" - [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -1731,6 +1694,25 @@ files = [ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] +[[package]] +name = "mako" +version = "1.3.0" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, + {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "markdown" version = "3.3.7" @@ -2660,6 +2642,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2667,8 +2650,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2685,6 +2675,7 @@ files = [ {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2692,6 +2683,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = 
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3266,7 +3258,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" -category = "dev" optional = false python-versions = "*" files = [ @@ -3578,4 +3569,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "e3e1b2d0645e5cd7ad0281091d65e85b411eab0ddd7c475762e908bf9c10bdb4" +content-hash = "6baf6175b51ec48a4653b31437eb4d02ff6385d8f973566192dbd31cb9c8c586" diff --git a/pyproject.toml b/pyproject.toml index 2dee3a88..60636010 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ python-dotenv = "^1.0.0" flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" email-validator = "^2.0.0.post2" +alembic = "^1.12.1" [tool.poetry.group.dev.dependencies] diff --git a/sql/init.sql b/sql/init.sql index bd8d6c79..1127e19a 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -243,19 +243,7 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme -CREATE TABLE IF NOT EXISTS "dataset_readme" ( - "id" CHAR(36) NOT NULL, - "content" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NOT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); --- Dumping data for table public.dataset_readme: 1 rows -/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; -INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES - ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS 
"dataset_record_keys" ( diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 9edd316b..cfcb1cb1 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -297,19 +297,9 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme -CREATE TABLE IF NOT EXISTS "dataset_readme" ( - "id" CHAR(36) NOT NULL, - "content" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NOT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); -- Dumping data for table public.dataset_readme: -1 rows /*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; -INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES - ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 50bc66b3..13099053 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -50,6 +50,23 @@ def test_put_dataset_access_metadata(_logged_in_client): # ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # +def test_get_alternative_identifier(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset alternative identifier content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response 
= _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + + assert response.status_code == 200 + + def test_post_alternative_identifier(_logged_in_client): """ Given a Flask application configured for testing and a study ID @@ -65,7 +82,7 @@ def test_post_alternative_identifier(_logged_in_client): json=[ { "identifier": "identifier test", - "type": "ark", + "type": "ARK", } ], ) @@ -75,24 +92,7 @@ def test_post_alternative_identifier(_logged_in_client): pytest.global_alternative_identifier_id = response_data[0]["id"] assert response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ark" - - -def test_get_alternative_identifier(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset alternative identifier content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - - assert response.status_code == 200 + assert response_data[0]["type"] == "ARK" def test_delete_alternative_identifier(_logged_in_client): @@ -685,49 +685,6 @@ def test_put_dataset_publisher_metadata(_logged_in_client): ) -# ------------------- README METADATA ------------------- # -def test_get_dataset_readme_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - readme metadata content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( 
- f"/study/{study_id}/dataset/{dataset_id}/metadata/readme" - ) - - assert response.status_code == 200 - - -def test_put_dataset_readme_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - readme metadata content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/readme", - json={ - "content": "This is the readme content", - }, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["content"] == "This is the readme content" - - # ------------------- RECORD KEYS METADATA ------------------- # def test_get_dataset_record_keys_metadata(_logged_in_client): """ @@ -766,8 +723,8 @@ def test_put_dataset_record_keys_metadata(_logged_in_client): assert response.status_code == 201 response_data = json.loads(response.data) - assert response_data["key_type"] == "Record Type" - assert response_data["key_details"] == "Details for Record Keys" + assert response_data["type"] == "Record Type" + assert response_data["details"] == "Details for Record Keys" # ------------------- RELATED ITEM METADATA ------------------- # @@ -792,7 +749,7 @@ def test_get_dataset_related_item_metadata(_logged_in_client): def test_post_dataset_related_item_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset' + When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-item' endpoint is requested (POST) Then check that the response is valid and creates the dataset related item metadata content @@ -820,7 +777,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): 
"metadata_scheme": "Metadata Scheme", "scheme_type": "Scheme Type", "scheme_uri": "Scheme URI", - "type": "ark", + "type": "ARK", } ], "issue": "Issue", @@ -862,7 +819,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["identifiers"][0]["type"] == "ark" + assert response_data[0]["identifiers"][0]["type"] == "ARK" assert response_data[0]["issue"] == "Issue" assert response_data[0]["last_page"] == "Last Page" assert response_data[0]["number_type"] == "Number Type" From ef26982cfc6f4f5269ff1537104f10a3d24f3f2f Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 10 Nov 2023 13:59:55 -0800 Subject: [PATCH 353/505] Revert "merge: version into staging (#21)" (#23) This reverts commit 3236235ec451ec4a1dbc0f513a5bd18daa47c56a. 
--- Dockerfile | 3 - ...2ac2b020c7c_delete_dataset_readme_table.py | 22 --- apis/__init__.py | 2 + apis/dataset.py | 146 ++++-------------- apis/dataset_metadata/dataset_readme.py | 33 ++++ apis/dataset_metadata/dataset_related_item.py | 1 + apis/dataset_metadata/dataset_title.py | 10 +- apis/study_metadata/study_design.py | 19 ++- apis/study_metadata/study_other.py | 3 +- model/__init__.py | 2 + model/dataset.py | 57 +------ model/dataset_metadata/dataset_access.py | 6 - .../dataset_alternate_identifier.py | 7 - model/dataset_metadata/dataset_consent.py | 7 - model/dataset_metadata/dataset_contributor.py | 9 -- model/dataset_metadata/dataset_date.py | 10 -- .../dataset_de_ident_level.py | 6 - model/dataset_metadata/dataset_description.py | 7 - model/dataset_metadata/dataset_funder.py | 7 - model/dataset_metadata/dataset_other.py | 14 -- model/dataset_metadata/dataset_readme.py | 31 ++++ model/dataset_metadata/dataset_record_keys.py | 6 - .../dataset_metadata/dataset_related_item.py | 34 +--- .../dataset_related_item_contributor.py | 8 - .../dataset_related_item_identifier.py | 7 - .../dataset_related_item_title.py | 7 - model/dataset_metadata/dataset_rights.py | 7 - model/dataset_metadata/dataset_subject.py | 7 - model/dataset_metadata/dataset_title.py | 7 - model/study.py | 47 ------ model/study_metadata/study_arm.py | 8 - model/study_metadata/study_available_ipd.py | 4 - model/study_metadata/study_contact.py | 10 -- model/study_metadata/study_description.py | 4 - model/study_metadata/study_eligibility.py | 11 -- model/study_metadata/study_identification.py | 8 - model/study_metadata/study_intervention.py | 8 - model/study_metadata/study_ipdsharing.py | 7 - model/study_metadata/study_link.py | 8 - model/study_metadata/study_location.py | 9 -- model/study_metadata/study_other.py | 7 - .../study_metadata/study_overall_official.py | 8 - model/study_metadata/study_reference.py | 8 - .../study_sponsors_collaborators.py | 8 - model/study_metadata/study_status.py | 7 - 
model/version.py | 2 - model/version_readme.py | 16 +- poetry.lock | 111 +++++++------ pyproject.toml | 1 - sql/init.sql | 12 ++ sql/init_timezones.sql | 10 ++ .../test_study_dataset_metadata_api.py | 91 ++++++++--- 52 files changed, 287 insertions(+), 603 deletions(-) delete mode 100644 alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py create mode 100644 apis/dataset_metadata/dataset_readme.py create mode 100644 model/dataset_metadata/dataset_readme.py diff --git a/Dockerfile b/Dockerfile index 7be75a98..24060626 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,7 +22,4 @@ COPY core ./core COPY app.py . COPY config.py . -# run database migrations -RUN alembic upgrade head - CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file diff --git a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py deleted file mode 100644 index f2ea30f3..00000000 --- a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py +++ /dev/null @@ -1,22 +0,0 @@ -"""delete dataset readme table - -Revision ID: 72ac2b020c7c -Revises: -Create Date: 2023-11-08 15:47:00.205940 - -""" -from typing import Sequence, Union - -import alembic -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = "72ac2b020c7c" -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - alembic.op.drop_table("dataset_readme") diff --git a/apis/__init__.py b/apis/__init__.py index bf2e0873..bfb5d0c0 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -16,6 +16,7 @@ from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_funder import api as funder from .dataset_metadata.dataset_other import api as dataset_other +from .dataset_metadata.dataset_readme import api as readme from .dataset_metadata.dataset_record_keys import api as record_keys from .dataset_metadata.dataset_related_item import api as related_item from .dataset_metadata.dataset_rights import api as rights @@ -62,6 +63,7 @@ "description", "funder", "dataset_other", + "readme", "record_keys", "related_item", "api", diff --git a/apis/dataset.py b/apis/dataset.py index ff933fda..dc609a75 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,6 +1,6 @@ import typing -from flask import jsonify, request +from flask import Response, jsonify, request from flask_restx import Namespace, Resource, fields import model @@ -32,8 +32,6 @@ "created_at": fields.String(required=True), "dataset_versions": fields.Nested(dataset_versions_model, required=True), "latest_version": fields.String(required=True), - "title": fields.String(required=True), - "description": fields.String(required=True), }, ) @@ -43,7 +41,6 @@ class DatasetList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset) - @api.doc("view datasets") def get(self, study_id): study = model.Study.query.get(study_id) datasets = model.Dataset.query.filter_by(study=study) @@ -51,12 +48,13 @@ def get(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("add datasets") + @api.doc("update dataset") 
@api.expect(dataset) def post(self, study_id): study = model.Study.query.get(study_id) if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 + # todo if study.participant id== different study Throw error data: typing.Union[typing.Any, dict] = request.json dataset_ = model.Dataset.from_data(study) model.db.session.add(dataset_) @@ -79,7 +77,6 @@ def post(self, study_id): @api.route("/study//dataset/") @api.response(201, "Success") @api.response(400, "Validation Error") -@api.doc("view dataset") class DatasetResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @@ -89,7 +86,6 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("update dataset") def put(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) @@ -106,20 +102,26 @@ def put(self, study_id: int, dataset_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.doc("delete dataset") def delete(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 - data_obj = model.Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: model.db.session.delete(version) - model.db.session.delete(data_obj) model.db.session.commit() return 204 + # def delete(self, study_id: int, dataset_id: int, version_id: int): + # data_obj = Dataset.query.get(dataset_id) + # for version in data_obj.dataset_versions: + # db.session.delete(version) + # db.session.commit() + # db.session.delete(data_obj) + # db.session.commit() + # return Response(status=204) + @api.route("/study//dataset//version/") class VersionResource(Resource): @@ -129,15 +131,9 @@ class VersionResource(Resource): def get( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument 
- study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict() - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("update dataset version") def put( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument @@ -149,86 +145,26 @@ def put( model.db.session.commit() return jsonify(data_version_obj.to_dict()), 201 - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("delete dataset version") def delete( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 - version_obj = model.Version.query.get(version_id) - model.db.session.delete(version_obj) - model.db.session.commit() - return 204 - - -@api.route("/study//dataset//version//changelog") -class VersionDatasetChangelog(Resource): - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version changelog") - def get(self, study_id: str, dataset_id: str, version_id: str): - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - version = model.Version.query.filter_by( - id=version_id, dataset_id=dataset_id - ).one_or_none() - return {"changelog": version.changelog} - - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version changelog update") - def put( - self, study_id: str, dataset_id: str, version_id: str - ): # pylint: disable= unused-argument - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - data: typing.Union[typing.Any, dict] = request.json - version_ = 
model.Version.query.get(version_id) - version_.changelog = data["changelog"] - model.db.session.commit() - return 201 - - -@api.route("/study//dataset//version//readme") -class VersionDatasetReadme(Resource): - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version readme") - def get(self, study_id: str, dataset_id: str, version_id: str): - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - version = model.Version.query.filter_by( - id=version_id, dataset_id=dataset_id - ).one_or_none() - return version.version_readme.to_dict(), 200 - - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version readme update") - def put( - self, study_id: str, dataset_id: str, version_id: str - ): # pylint: disable= unused-argument - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - data = request.json - version_ = model.Version.query.get(version_id) - version_.version_readme.update(data) + data_obj = model.Dataset.query.get(dataset_id) + for version in data_obj.dataset_versions: + model.db.session.delete(version) + model.db.session.commit() + model.db.session.delete(data_obj) model.db.session.commit() - return 201 + return Response(status=204) @api.route("/study//dataset//version") class VersionList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("view versions") + @api.doc("versions") def get(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("version", study): @@ -266,37 +202,21 @@ def post(self, study_id: int, dataset_id: int): # return "Access denied, you can not modify", 403 # data_obj = model.Version.query.get(version_id) # data: typing.Union[typing.Any, dict] = request.json +# dataset_versions = model.Version.from_data(data_obj, data) # model.db.session.commit() 
# return dataset_versions.to_dict() -@api.route("/study//dataset//version//study-metadata") -class VersionDatasetMetadataResource(Resource): - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version study metadata get") - def get(self, study_id: str, dataset_id: str, version_id: str): - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - version = model.Version.query.filter_by( - id=version_id, dataset_id=dataset_id - ).one_or_none() - return version.dataset.study.to_dict_study_metadata() - - -@api.route( - "/study//dataset//version//dataset-metadata" -) -class VersionStudyMetadataResource(Resource): - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version dataset metadata get") - def get(self, study_id: str, dataset_id: str, version_id: str): - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - version = model.Version.query.filter_by( - id=version_id, dataset_id=dataset_id - ).one_or_none() - return version.dataset.to_dict_dataset_metadata() +# +# @api.route("/study//dataset/ +# /version//dataset-metadata") +# class VersionStudyMetadataResource(Resource): +# @api.response(201, "Success") +# @api.response(400, "Validation Error") +# @api.doc("version dataset metadata get") +# def get(self, study_id: int, dataset_id: int, version_id): +# study = model.Study.query.get(study_id) +# if not is_granted("dataset", study): +# return "Access denied, you can not modify", 403 +# version = dataset.dataset_version.get(version_id) +# pass diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py new file mode 100644 index 00000000..edd1e0ad --- /dev/null +++ b/apis/dataset_metadata/dataset_readme.py @@ -0,0 +1,33 @@ +from flask import request +from flask_restx import Resource, fields + +import model +from 
apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_readme = api.model( + "DatasetReadme", + {"id": fields.String(required=True), "content": fields.String(required=True)}, +) + + +@api.route("/study//dataset//metadata/readme") +class DatasetReadmeResource(Resource): + @api.doc("readme") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_readme) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + dataset_ = model.Dataset.query.get(dataset_id) + dataset_readme_ = dataset_.dataset_readme + return dataset_readme_.to_dict() + + def put(self, study_id: int, dataset_id: int): + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_readme.update(data) + model.db.session.commit() + return dataset_.dataset_readme.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index da3defdd..df5bca5b 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -160,6 +160,7 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", + "MainTitle", ], }, }, diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 23793e7e..b3d245c8 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -63,6 +63,7 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", + "MainTitle", ], }, }, @@ -82,12 +83,17 @@ def post(self, study_id: int, dataset_id: int): for i in data: if "id" in i and i["id"]: dataset_title_ = model.DatasetTitle.query.get(i["id"]) + # if dataset_title_.type == 
"MainTitle": + # return ( + # "Main Title type can not be modified", + # 403, + # dataset_title_.update(i) list_of_elements.append(dataset_title_.to_dict()) elif "id" not in i or not i["id"]: if i["type"] == "MainTitle": return ( - "Main Title type can not be given", + "MainTitle type can not be given", 403, ) dataset_title_ = model.DatasetTitle.from_data(data_obj, i) @@ -119,7 +125,7 @@ def delete( dataset_title_ = model.DatasetTitle.query.get(title_id) if dataset_title_.type == "MainTitle": return ( - "Main Title type can not be deleted", + "MainTitle type can not be deleted", 403, ) model.db.session.delete(dataset_title_) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index ebadf2e6..0b8fd32c 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -13,6 +13,7 @@ study_design = api.model( "StudyDesign", { + "id": fields.String(required=True), "design_allocation": fields.String(required=True), "study_type": fields.String(required=True), "design_intervention_model": fields.String(required=True), @@ -73,7 +74,7 @@ def put(self, study_id: int): "type": ["string", "null"], }, "design_who_masked_list": { - "type": ["array", "null"], + "type": "array", "items": { "type": "string", "oneOf": [ @@ -90,7 +91,7 @@ def put(self, study_id: int): "uniqueItems": True, }, "phase_list": { - "type": ["array", "null"], + "type": "array", "items": { "type": "string", "oneOf": [ @@ -110,9 +111,9 @@ def put(self, study_id: int): }, "uniqueItems": True, }, - "enrollment_count": {"type": ["integer", "null"]}, + "enrollment_count": {"type": "integer"}, "enrollment_type": { - "type": ["string", "null"], + "type": "string", "enum": ["Actual", "Anticipated"], }, "number_arms": {"type": ["integer", "null"]}, @@ -127,7 +128,7 @@ def put(self, study_id: int): "Case-Control", "Case-Only", "Case-Crossover", - "Ecologic or Community", + "Ecologic or Community Study", "Family-Based", "Other", ] @@ -154,8 +155,12 @@ def 
put(self, study_id: int): "uniqueItems": True, }, "bio_spec_retention": {"type": ["string", "null"]}, - "bio_spec_description": {"type": ["string", "null"]}, - "target_duration": {"type": ["string", "null"]}, + "bio_spec_description": { + "type": ["string", "null"], + }, + "target_duration": { + "type": ["string", "null"], + }, "number_groups_cohorts": {"type": ["integer", "null"]}, }, } diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index fd04de4a..9030641c 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -91,7 +91,8 @@ def get(self, study_id: int): study_ = model.Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc - return {"oversight": study_oversight_has_dmc} + + return study_oversight_has_dmc def put(self, study_id: int): """Update study oversight metadata""" diff --git a/model/__init__.py b/model/__init__.py index 77f8ffeb..5b91a29a 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -18,6 +18,7 @@ from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder from .dataset_metadata.dataset_other import DatasetOther +from .dataset_metadata.dataset_readme import DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights from .dataset_metadata.dataset_subject import DatasetSubject @@ -70,6 +71,7 @@ "DatasetFunder", "DatasetAlternateIdentifier", "DatasetRights", + "DatasetReadme", "DatasetRecordKeys", "DatasetTitle", "DatasetSubject", diff --git a/model/dataset.py b/model/dataset.py index 96bc4d41..98cdc078 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -20,6 +20,7 @@ def __init__(self, study): self.dataset_record_keys = model.DatasetRecordKeys(self) self.dataset_de_ident_level = model.DatasetDeIdentLevel(self) self.dataset_consent = model.DatasetConsent(self) + self.dataset_readme = 
model.DatasetReadme(self) self.dataset_other = model.DatasetOther(self) self.dataset_title.append(model.DatasetTitle(self)) @@ -88,6 +89,9 @@ def __init__(self, study): dataset_other = db.relationship( "DatasetOther", back_populates="dataset", uselist=False, cascade="all, delete" ) + dataset_readme = db.relationship( + "DatasetReadme", back_populates="dataset", uselist=False, cascade="all, delete" + ) dataset_record_keys = db.relationship( "DatasetRecordKeys", back_populates="dataset", @@ -109,61 +113,14 @@ def __init__(self, study): def to_dict(self): last_published = self.last_published() + # last_modified = self.last_modified() + return { "id": self.id, "created_at": self.created_at, # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published else None, - "title": [ - i.title if i.title else None for i in self.dataset_title # type: ignore - ][0], - "description": [ - i.description if i.type == "Abstract" else None - for i in self.dataset_description # type: ignore - ][0], - } - - def to_dict_dataset_metadata(self): - return { - "contributors": [ - i.to_dict_metadata() - for i in self.dataset_contributors # type: ignore - if not i.creator - ], - "about": self.dataset_other.to_dict_metadata(), - "publisher": self.dataset_other.to_dict_publisher(), # type: ignore - "access": self.dataset_access.to_dict_metadata(), - "consent": self.dataset_consent.to_dict_metadata(), - "dates": [i.to_dict_metadata() for i in self.dataset_date], # type: ignore - "de_identification": self.dataset_de_ident_level.to_dict_metadata(), - "descriptions": [ - i.to_dict_metadata() for i in self.dataset_description # type: ignore - ], - "funders": [ - i.to_dict_metadata() for i in self.dataset_funder # type: ignore - ], - "identifiers": [ - i.to_dict_metadata() - for i in self.dataset_alternate_identifier # type: ignore - ], - "creators": [ - i.to_dict_metadata() - for i in self.dataset_contributors # type: ignore - if i.creator - 
], - "record_keys": self.dataset_record_keys.to_dict_metadata(), - "related_items": [ - i.to_dict_metadata() for i in self.dataset_related_item # type: ignore - ], - "rights": [ - i.to_dict_metadata() for i in self.dataset_rights # type: ignore - ], - "subjects": [ - i.to_dict_metadata() for i in self.dataset_subject # type: ignore - ], - "titles": [ - i.to_dict_metadata() for i in self.dataset_title # type: ignore - ], + # "title": self.dataset_title.title if self.dataset_title else "" } def last_published(self): diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 7ef72ffb..577a5e14 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -30,12 +30,6 @@ def to_dict(self): "url": self.url, } - def to_dict_metadata(self): - return { - "type": self.type, - "description": self.description, - } - @staticmethod def from_data(dataset: Dataset, data: dict): dataset_access = DatasetAccess(dataset) diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index 7eba2e98..1b2ebb70 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -28,13 +28,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "type": self.type, - "identifier": self.identifier, - } - @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetAlternateIdentifier(dataset) diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index e7ea1cd3..2258ea5d 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -38,13 +38,6 @@ def to_dict(self): "details": self.details, } - def to_dict_metadata(self): - return { - "noncommercial": self.noncommercial, - "geog_restrict": self.geog_restrict, - 
"research_type": self.research_type, - } - @staticmethod def from_data(dataset, data: dict): dataset_consent = DatasetConsent(dataset) diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py index 2e14d02a..6d4f54d7 100644 --- a/model/dataset_metadata/dataset_contributor.py +++ b/model/dataset_metadata/dataset_contributor.py @@ -40,15 +40,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "name": self.name, - "name_type": self.name_identifier, - "contributor_type": self.contributor_type, - "creator": self.creator, - } - @staticmethod def from_data(dataset, data: dict): dataset_contributor = DatasetContributor(dataset) diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index f1da513c..c58a1741 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -31,16 +31,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - bigint_timestamp = self.date - unix_timestamp = bigint_timestamp / 1000 - datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) - return { - "id": self.id, - "date": datetime_obj.strftime("%m-%d-%Y"), - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetDate(dataset) diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index b5acccc4..4d8e1cc1 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -38,12 +38,6 @@ def to_dict(self): "details": self.details, } - def to_dict_metadata(self): - return { - "direct": self.direct, - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_de_ident_level = DatasetDeIdentLevel(dataset) diff --git a/model/dataset_metadata/dataset_description.py 
b/model/dataset_metadata/dataset_description.py index 6660e1f9..97f3a8a2 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -30,13 +30,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "description": self.description, - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_description = DatasetDescription(dataset) diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index 061e7d31..90c45551 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -38,13 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "name": self.name, - "identifier": self.identifier, - } - @staticmethod def from_data(dataset, data: dict): dataset_funder = DatasetFunder(dataset) diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 0225972b..5a96918f 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -44,20 +44,6 @@ def to_dict(self): "resource_type": self.resource_type, } - def to_dict_metadata(self): - return { - "language": self.language, - "size": self.size, - "resource_type": self.resource_type, - } - - def to_dict_publisher(self): - return { - "managing_organization_name": self.managing_organization_name, - "managing_organization_ror_id": self.managing_organization_ror_id, - "publisher": self.publisher, - } - @staticmethod def from_data(dataset, data: dict): dataset_other = DatasetOther(dataset) diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py new file mode 100644 index 00000000..7fa75b3a --- /dev/null +++ b/model/dataset_metadata/dataset_readme.py @@ -0,0 +1,31 @@ +from ..db import db + + +class DatasetReadme(db.Model): # type: 
ignore + def __init__(self, dataset): + self.dataset = dataset + self.content = "" + + __tablename__ = "dataset_readme" + content = db.Column(db.String, nullable=False) + + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) + dataset = db.relationship("Dataset", back_populates="dataset_readme") + + def to_dict(self): + return { + "id": self.dataset_id, + "content": self.content, + } + + @staticmethod + def from_data(dataset, data: dict): + dataset_readme = DatasetReadme(dataset) + dataset_readme.update(data) + return dataset_readme + + def update(self, data: dict): + self.content = data["content"] + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 9f2d9b94..993af3f2 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -17,12 +17,6 @@ def __init__(self, dataset): dataset = db.relationship("Dataset", back_populates="dataset_record_keys") def to_dict(self): - return { - "type": self.key_type, - "details": self.key_details, - } - - def to_dict_metadata(self): return { "key_type": self.key_type, "key_details": self.key_details, diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index f95db200..579bed63 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -52,6 +52,7 @@ def to_dict(self): key=lambda creator: creator.created_at, ) creators = [c for c in sorted_contributors if c.creator] + contributors = [c for c in sorted_contributors if not c.creator] return { "id": self.id, @@ -96,39 +97,6 @@ def to_dict(self): ], } - def to_dict_metadata(self): - bigint_timestamp = self.dataset_related_item_other.publication_year - pub_year = "" - if bigint_timestamp: - unix_timestamp = bigint_timestamp / 1000 - datetime_obj = 
datetime.datetime.utcfromtimestamp(unix_timestamp) - pub_year = datetime_obj.strftime("%Y") - sorted_contributors = sorted( - self.dataset_related_item_contributor, - key=lambda creator: creator.created_at, - ) - - creators = [c for c in sorted_contributors if c.creator] - contributors = [c for c in sorted_contributors if not c.creator] - return { - "type": self.type, - "titles": [ - i.to_dict_metadata() - for i in self.dataset_related_item_title # type: ignore - ], - "identifiers": [ - i.to_dict_metadata() - for i in self.dataset_related_item_identifier # type: ignore - ], - "creators": [i.to_dict_metadata() for i in creators], # type: ignore - "contributors": [ - i.to_dict_metadata() for i in contributors # type: ignore - ], - # "publication_year": self.dataset_related_item_other.publication_year, - "publication_year": pub_year if bigint_timestamp else None, - "publisher": self.dataset_related_item_other.publisher, - } - @staticmethod def from_data(dataset, data: dict): dataset_related_item = DatasetRelatedItem(dataset) diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index 480757cb..b38b3651 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -36,14 +36,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "name": self.name, - "name_type": self.name_type, - "contributor_type": self.contributor_type, - } - @staticmethod def from_data(dataset_related_item, data: dict, creator): contributor_ = DatasetRelatedItemContributor(dataset_related_item, creator) diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 63d95f4b..48b2e548 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ 
b/model/dataset_metadata/dataset_related_item_identifier.py @@ -38,13 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "identifier": self.identifier, - "type": self.type, - } - @staticmethod def from_data(dataset_related_item, data: dict): identifier_ = DatasetRelatedItemIdentifier(dataset_related_item) diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index fab997e5..2d4ea08d 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -32,13 +32,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "title": self.title, - "type": self.type, - } - @staticmethod def from_data(dataset_related_item, data: dict): dataset_related_item_title = DatasetRelatedItemTitle(dataset_related_item) diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 77103cde..27ad1a63 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -33,13 +33,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "rights": self.rights, - "identifier": self.identifier, - } - @staticmethod def from_data(dataset, data: dict): dataset_rights = DatasetRights(dataset) diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 0e6d5792..43ea560e 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -35,13 +35,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "subject": self.subject, - "scheme": self.scheme, - } - @staticmethod def from_data(dataset, data: dict): dataset_subject = DatasetSubject(dataset) diff --git 
a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index f8426471..eff54c9f 100644 --- a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -30,13 +30,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - return { - "id": self.id, - "title": self.title, - "type": self.type, - } - @staticmethod def from_data(dataset, data: dict): dataset_title = DatasetTitle(dataset) diff --git a/model/study.py b/model/study.py index 667b6f6f..a919bbae 100644 --- a/model/study.py +++ b/model/study.py @@ -172,53 +172,6 @@ def to_dict(self): "role": contributor_permission.to_dict()["role"], } - def to_dict_study_metadata(self): - # self.study_contact: Iterable = [] - primary = [ - i.to_dict_metadata() - for i in self.study_identification # type: ignore - if not i.secondary - ] - - return { - "arms": [i.to_dict_metadata() for i in self.study_arm], # type: ignore - "available_ipd": [ - i.to_dict_metadata() for i in self.study_available_ipd # type: ignore - ], - "contacts": [ - i.to_dict_metadata() for i in self.study_contact # type: ignore - ], - "description": self.study_description.to_dict_metadata(), - "design": self.study_design.to_dict(), - "eligibility": self.study_eligibility.to_dict_metadata(), - "primary_identifier": primary[0] if len(primary) else None, - "secondary_identifiers": [ - i.to_dict_metadata() - for i in self.study_identification # type: ignore - if i.secondary - ], - "interventions": [ - i.to_dict_metadata() for i in self.study_intervention # type: ignore - ], - "ipd_sharing": self.study_ipdsharing.to_dict_metadata(), - "links": [i.to_dict_metadata() for i in self.study_link], # type: ignore - "locations": [ - i.to_dict_metadata() for i in self.study_location # type: ignore - ], - "overall_officials": [ - i.to_dict_metadata() - for i in self.study_overall_official # type: ignore - ], - "references": [ - i.to_dict_metadata() for i in self.study_reference # type: 
ignore - ], - "sponsors": self.study_sponsors_collaborators.to_dict_metadata(), - "collaborators": self.study_sponsors_collaborators.collaborator_name, - "status": self.study_status.to_dict_metadata(), - "oversight": self.study_other.oversight_has_dmc, - "conditions": self.study_other.conditions, - } - @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index db4609cf..01a9eed6 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -43,14 +43,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "label": self.label, - "description": self.description, - } - @staticmethod def from_data(study: model.Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index 5ae9913c..cd65d626 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -40,10 +40,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return {"identifier": self.identifier, "url": self.url} - @staticmethod def from_data(study: model.StudyArm, data: dict): """Creates a new study metadata from a dictionary""" diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index d2c5c5d3..4f943e6f 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -48,16 +48,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "name": self.name, - "affiliation": self.affiliation, - "phone": self.phone, - "email_address": 
self.email_address, - } - @staticmethod def from_data(study: Study, data: dict, role, central_contact): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index f5115882..be3289da 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -32,10 +32,6 @@ def to_dict(self): "detailed_description": self.detailed_description, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return {"brief_summary": self.brief_summary} - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 81019b09..752a69f4 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -67,17 +67,6 @@ def to_dict(self): else None, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "gender": self.gender, - "gender_based": self.gender_based, - "minimum_age_value": self.minimum_age_value, - "maximum_age_value": self.maximum_age_value, - "inclusion_criteria": self.inclusion_criteria, - "exclusion_criteria": self.exclusion_criteria, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 299f20bd..f6373bf1 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -41,14 +41,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "identifier": self.identifier, - "identifier_type": self.identifier_type, - "id": self.id, - } - @staticmethod def from_data(study: Study, data: dict, secondary): 
"""Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index bb946cd2..0230b832 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -45,14 +45,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "type": self.type, - "name": self.name, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 9152959c..9d7868bc 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -46,13 +46,6 @@ def to_dict(self): "ipd_sharing_url": self.ipd_sharing_url, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "ipd_sharing": self.ipd_sharing, - "ipd_sharing_info_type_list": self.ipd_sharing_info_type_list, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index aa3ba44e..946704d2 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -36,14 +36,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "title": self.title, - "url": self.url, - "id": self.id, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 27ac6476..06e640d6 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -44,15 +44,6 @@ def to_dict(self): "created_at": 
self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "facility": self.facility, - "city": self.city, - "country": self.country, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 237d5b98..f66758e9 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -41,13 +41,6 @@ def to_dict(self): "size": self.size, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "oversight_has_dmc": self.oversight_has_dmc, - "conditions": self.conditions, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 069f3099..e173cd0b 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -38,14 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "name": self.name, - "role": self.role, - "affiliation": self.affiliation, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index cce05886..113af7b0 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -38,14 +38,6 @@ def to_dict(self): "created_at": self.created_at, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "identifier": self.identifier, - "citation": self.citation, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a 
dictionary""" diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 9f916611..a8ab84f5 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -45,14 +45,6 @@ def to_dict(self): "lead_sponsor_name": self.lead_sponsor_name, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "responsible_party_type": self.responsible_party_type, - "responsible_party_investigator_name": self.responsible_party_investigator_name, - "lead_sponsor_name": self.lead_sponsor_name, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index dd1ffe78..24920e32 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -43,13 +43,6 @@ def to_dict(self): "completion_date_type": self.completion_date_type, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "overall_status": self.overall_status, - "start_date": self.start_date, - } - @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/version.py b/model/version.py index 3aa3bb5c..9cff0264 100644 --- a/model/version.py +++ b/model/version.py @@ -4,7 +4,6 @@ from sqlalchemy import Table -import model from model.dataset import Dataset from .db import db @@ -22,7 +21,6 @@ def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() - self.version_readme = model.VersionReadme(self) __tablename__ = "version" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/version_readme.py b/model/version_readme.py index c04bd1b5..ec657f54 100644 --- a/model/version_readme.py +++ 
b/model/version_readme.py @@ -2,10 +2,6 @@ class VersionReadme(db.Model): # type: ignore - def __init__(self, version): - self.version = version - self.content = "" - __tablename__ = "version_readme" content = db.Column(db.String, nullable=True) @@ -16,14 +12,14 @@ def __init__(self, version): def to_dict(self): return { - "readme": self.content, + "content": self.content, } @staticmethod - def from_data(version, data: dict): - readme = VersionReadme(version) - readme.update(data) - return readme + def from_data(data: dict): + user = VersionReadme() + user.update(data) + return user def update(self, data: dict): - self.content = data["readme"] + self.content = data["content"] diff --git a/poetry.lock b/poetry.lock index b9e4bb35..e322ea82 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,26 +1,5 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. -[[package]] -name = "alembic" -version = "1.12.1" -description = "A database migration tool for SQLAlchemy." -optional = false -python-versions = ">=3.7" -files = [ - {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, - {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, -] - -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} -Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" - -[package.extras] -tz = ["python-dateutil"] - [[package]] name = "aniso8601" version = "9.0.1" @@ -679,6 +658,51 @@ docopt = ">=0.6" minilog = ">=2.0" requests = ">=2.28,<3.0" +[[package]] +name = "cryptography" +version = "41.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, + {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, + {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, + {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", 
"pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "debugpy" version = "1.6.7" @@ -1649,6 +1673,19 @@ files = [ {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, ] +[[package]] +name = "jwt" +version = "1.3.1" +description = "JSON Web Token library for Python 3." +optional = false +python-versions = ">= 3.6" +files = [ + {file = "jwt-1.3.1-py3-none-any.whl", hash = "sha256:61c9170f92e736b530655e75374681d4fcca9cfa8763ab42be57353b2b203494"}, +] + +[package.dependencies] +cryptography = ">=3.1,<3.4.0 || >3.4.0" + [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -1694,25 +1731,6 @@ files = [ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] -[[package]] -name = "mako" -version = "1.3.0" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, - {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - [[package]] name = "markdown" version = "3.3.7" @@ -2642,7 +2660,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2650,15 +2667,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2675,7 +2685,6 @@ files = [ {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2683,7 +2692,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = 
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3258,6 +3266,7 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" +category = "dev" optional = false python-versions = "*" files = [ @@ -3569,4 +3578,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "6baf6175b51ec48a4653b31437eb4d02ff6385d8f973566192dbd31cb9c8c586" +content-hash = "e3e1b2d0645e5cd7ad0281091d65e85b411eab0ddd7c475762e908bf9c10bdb4" diff --git a/pyproject.toml b/pyproject.toml index 60636010..2dee3a88 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,6 @@ python-dotenv = "^1.0.0" flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" email-validator = "^2.0.0.post2" -alembic = "^1.12.1" [tool.poetry.group.dev.dependencies] diff --git a/sql/init.sql b/sql/init.sql index 1127e19a..bd8d6c79 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -243,7 +243,19 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme +CREATE TABLE IF NOT EXISTS "dataset_readme" ( + "id" CHAR(36) NOT NULL, + "content" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); +-- Dumping data for table public.dataset_readme: 1 rows +/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS 
"dataset_record_keys" ( diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index cfcb1cb1..9edd316b 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -297,9 +297,19 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme +CREATE TABLE IF NOT EXISTS "dataset_readme" ( + "id" CHAR(36) NOT NULL, + "content" VARCHAR NOT NULL, + "dataset_id" CHAR(36) NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); -- Dumping data for table public.dataset_readme: -1 rows /*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; +INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES + ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 13099053..50bc66b3 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -50,23 +50,6 @@ def test_put_dataset_access_metadata(_logged_in_client): # ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # -def test_get_alternative_identifier(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset alternative identifier content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response 
= _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - - assert response.status_code == 200 - - def test_post_alternative_identifier(_logged_in_client): """ Given a Flask application configured for testing and a study ID @@ -82,7 +65,7 @@ def test_post_alternative_identifier(_logged_in_client): json=[ { "identifier": "identifier test", - "type": "ARK", + "type": "ark", } ], ) @@ -92,7 +75,24 @@ def test_post_alternative_identifier(_logged_in_client): pytest.global_alternative_identifier_id = response_data[0]["id"] assert response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ARK" + assert response_data[0]["type"] == "ark" + + +def test_get_alternative_identifier(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset alternative identifier content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + + assert response.status_code == 200 def test_delete_alternative_identifier(_logged_in_client): @@ -685,6 +685,49 @@ def test_put_dataset_publisher_metadata(_logged_in_client): ) +# ------------------- README METADATA ------------------- # +def test_get_dataset_readme_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + readme metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( 
+ f"/study/{study_id}/dataset/{dataset_id}/metadata/readme" + ) + + assert response.status_code == 200 + + +def test_put_dataset_readme_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + readme metadata content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/readme", + json={ + "content": "This is the readme content", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data["content"] == "This is the readme content" + + # ------------------- RECORD KEYS METADATA ------------------- # def test_get_dataset_record_keys_metadata(_logged_in_client): """ @@ -723,8 +766,8 @@ def test_put_dataset_record_keys_metadata(_logged_in_client): assert response.status_code == 201 response_data = json.loads(response.data) - assert response_data["type"] == "Record Type" - assert response_data["details"] == "Details for Record Keys" + assert response_data["key_type"] == "Record Type" + assert response_data["key_details"] == "Details for Record Keys" # ------------------- RELATED ITEM METADATA ------------------- # @@ -749,7 +792,7 @@ def test_get_dataset_related_item_metadata(_logged_in_client): def test_post_dataset_related_item_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-item' + When the '/study/{study_id}/dataset' endpoint is requested (POST) Then check that the response is valid and creates the dataset related item metadata content @@ -777,7 +820,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): 
"metadata_scheme": "Metadata Scheme", "scheme_type": "Scheme Type", "scheme_uri": "Scheme URI", - "type": "ARK", + "type": "ark", } ], "issue": "Issue", @@ -819,7 +862,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["identifiers"][0]["type"] == "ARK" + assert response_data[0]["identifiers"][0]["type"] == "ark" assert response_data[0]["issue"] == "Issue" assert response_data[0]["last_page"] == "Last Page" assert response_data[0]["number_type"] == "Number Type" From 8b658ca05448c04a7d9a7dbcdf5497fc273cf9ee Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 10 Nov 2023 14:00:57 -0800 Subject: [PATCH 354/505] =?UTF-8?q?feat:=20=E2=9C=A8=20add=20support=20for?= =?UTF-8?q?=20dataset=20version=20metadata=20in=20the=20API=20(#24)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add study metadata GET endpoint * fix: study design schema * feat: GET version study metadata * style: format * fix: dataset overview * fix: study metadata overview * style: 🎨 fix code style issues with Black * fix: format * style: pylint and flake errors * feat: changelog and readme endpoints * fix: dataset dataset version GET * fix: study version GET * fix: typo * style: 🎨 fix code style issues with Black * fix: schema * fix: version request * style: 🎨 fix code style issues with Black * style: format * fix: dataset version endpoints * fix: related items version * fix: delete dataset readme * feat: alembic file for deleted dataset readme table * style: 🎨 fix code style issues with Black * feat Contributor type return for minimised version metadata * style: 🎨 fix code style issues with Black * fix: alembix version * 👷 ci: add db migrations to dockerfile * fix: dataset 
title name became as uppercase * style: 🎨 fix code style issues with Black * style: format * fix: main title * fix: testing errors * style: format * fix: remove comment * fix: remove comment --------- Co-authored-by: aydawka Co-authored-by: Lint Action --- Dockerfile | 3 + ...2ac2b020c7c_delete_dataset_readme_table.py | 22 +++ apis/__init__.py | 2 - apis/dataset.py | 146 ++++++++++++++---- apis/dataset_metadata/dataset_readme.py | 33 ---- apis/dataset_metadata/dataset_related_item.py | 1 - apis/dataset_metadata/dataset_title.py | 10 +- apis/study_metadata/study_design.py | 19 +-- apis/study_metadata/study_other.py | 3 +- model/__init__.py | 2 - model/dataset.py | 57 ++++++- model/dataset_metadata/dataset_access.py | 6 + .../dataset_alternate_identifier.py | 7 + model/dataset_metadata/dataset_consent.py | 7 + model/dataset_metadata/dataset_contributor.py | 9 ++ model/dataset_metadata/dataset_date.py | 10 ++ .../dataset_de_ident_level.py | 6 + model/dataset_metadata/dataset_description.py | 7 + model/dataset_metadata/dataset_funder.py | 7 + model/dataset_metadata/dataset_other.py | 14 ++ model/dataset_metadata/dataset_readme.py | 31 ---- model/dataset_metadata/dataset_record_keys.py | 6 + .../dataset_metadata/dataset_related_item.py | 34 +++- .../dataset_related_item_contributor.py | 8 + .../dataset_related_item_identifier.py | 7 + .../dataset_related_item_title.py | 7 + model/dataset_metadata/dataset_rights.py | 7 + model/dataset_metadata/dataset_subject.py | 7 + model/dataset_metadata/dataset_title.py | 7 + model/study.py | 47 ++++++ model/study_metadata/study_arm.py | 8 + model/study_metadata/study_available_ipd.py | 4 + model/study_metadata/study_contact.py | 10 ++ model/study_metadata/study_description.py | 4 + model/study_metadata/study_eligibility.py | 11 ++ model/study_metadata/study_identification.py | 8 + model/study_metadata/study_intervention.py | 8 + model/study_metadata/study_ipdsharing.py | 7 + model/study_metadata/study_link.py | 8 + 
model/study_metadata/study_location.py | 9 ++ model/study_metadata/study_other.py | 7 + .../study_metadata/study_overall_official.py | 8 + model/study_metadata/study_reference.py | 8 + .../study_sponsors_collaborators.py | 8 + model/study_metadata/study_status.py | 7 + model/version.py | 2 + model/version_readme.py | 16 +- poetry.lock | 111 ++++++------- pyproject.toml | 1 + sql/init.sql | 12 -- sql/init_timezones.sql | 10 -- .../test_study_dataset_metadata_api.py | 91 +++-------- 52 files changed, 603 insertions(+), 287 deletions(-) create mode 100644 alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py delete mode 100644 apis/dataset_metadata/dataset_readme.py delete mode 100644 model/dataset_metadata/dataset_readme.py diff --git a/Dockerfile b/Dockerfile index 24060626..7be75a98 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,4 +22,7 @@ COPY core ./core COPY app.py . COPY config.py . +# run database migrations +RUN alembic upgrade head + CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file diff --git a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py new file mode 100644 index 00000000..f2ea30f3 --- /dev/null +++ b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py @@ -0,0 +1,22 @@ +"""delete dataset readme table + +Revision ID: 72ac2b020c7c +Revises: +Create Date: 2023-11-08 15:47:00.205940 + +""" +from typing import Sequence, Union + +import alembic +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "72ac2b020c7c" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + alembic.op.drop_table("dataset_readme") diff --git a/apis/__init__.py b/apis/__init__.py index bfb5d0c0..bf2e0873 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -16,7 +16,6 @@ from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_funder import api as funder from .dataset_metadata.dataset_other import api as dataset_other -from .dataset_metadata.dataset_readme import api as readme from .dataset_metadata.dataset_record_keys import api as record_keys from .dataset_metadata.dataset_related_item import api as related_item from .dataset_metadata.dataset_rights import api as rights @@ -63,7 +62,6 @@ "description", "funder", "dataset_other", - "readme", "record_keys", "related_item", "api", diff --git a/apis/dataset.py b/apis/dataset.py index dc609a75..ff933fda 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,6 +1,6 @@ import typing -from flask import Response, jsonify, request +from flask import jsonify, request from flask_restx import Namespace, Resource, fields import model @@ -32,6 +32,8 @@ "created_at": fields.String(required=True), "dataset_versions": fields.Nested(dataset_versions_model, required=True), "latest_version": fields.String(required=True), + "title": fields.String(required=True), + "description": fields.String(required=True), }, ) @@ -41,6 +43,7 @@ class DatasetList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset) + @api.doc("view datasets") def get(self, study_id): study = model.Study.query.get(study_id) datasets = model.Dataset.query.filter_by(study=study) @@ -48,13 +51,12 @@ def get(self, study_id): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("update dataset") + @api.doc("add datasets") 
@api.expect(dataset) def post(self, study_id): study = model.Study.query.get(study_id) if not is_granted("add_dataset", study): return "Access denied, you can not modify", 403 - # todo if study.participant id== different study Throw error data: typing.Union[typing.Any, dict] = request.json dataset_ = model.Dataset.from_data(study) model.db.session.add(dataset_) @@ -77,6 +79,7 @@ def post(self, study_id): @api.route("/study//dataset/") @api.response(201, "Success") @api.response(400, "Validation Error") +@api.doc("view dataset") class DatasetResource(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") @@ -86,6 +89,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.response(201, "Success") @api.response(400, "Validation Error") + @api.doc("update dataset") def put(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) @@ -102,26 +106,20 @@ def put(self, study_id: int, dataset_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.doc("delete dataset") def delete(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 + data_obj = model.Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: model.db.session.delete(version) + model.db.session.delete(data_obj) model.db.session.commit() return 204 - # def delete(self, study_id: int, dataset_id: int, version_id: int): - # data_obj = Dataset.query.get(dataset_id) - # for version in data_obj.dataset_versions: - # db.session.delete(version) - # db.session.commit() - # db.session.delete(data_obj) - # db.session.commit() - # return Response(status=204) - @api.route("/study//dataset//version/") class VersionResource(Resource): @@ -131,9 +129,15 @@ class VersionResource(Resource): def get( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument 
+ study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict() + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("update dataset version") def put( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument @@ -145,26 +149,86 @@ def put( model.db.session.commit() return jsonify(data_version_obj.to_dict()), 201 + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("delete dataset version") def delete( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): return "Access denied, you can not modify", 403 - data_obj = model.Dataset.query.get(dataset_id) - for version in data_obj.dataset_versions: - model.db.session.delete(version) - model.db.session.commit() - model.db.session.delete(data_obj) + version_obj = model.Version.query.get(version_id) + model.db.session.delete(version_obj) + model.db.session.commit() + return 204 + + +@api.route("/study//dataset//version//changelog") +class VersionDatasetChangelog(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version changelog") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return {"changelog": version.changelog} + + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version changelog update") + def put( + self, study_id: str, dataset_id: str, version_id: str + ): # pylint: disable= unused-argument + study = 
model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + data: typing.Union[typing.Any, dict] = request.json + version_ = model.Version.query.get(version_id) + version_.changelog = data["changelog"] + model.db.session.commit() + return 201 + + +@api.route("/study//dataset//version//readme") +class VersionDatasetReadme(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version readme") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return version.version_readme.to_dict(), 200 + + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version readme update") + def put( + self, study_id: str, dataset_id: str, version_id: str + ): # pylint: disable= unused-argument + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + data = request.json + version_ = model.Version.query.get(version_id) + version_.version_readme.update(data) model.db.session.commit() - return Response(status=204) + return 201 @api.route("/study//dataset//version") class VersionList(Resource): @api.response(201, "Success") @api.response(400, "Validation Error") - @api.doc("versions") + @api.doc("view versions") def get(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("version", study): @@ -202,21 +266,37 @@ def post(self, study_id: int, dataset_id: int): # return "Access denied, you can not modify", 403 # data_obj = model.Version.query.get(version_id) # data: typing.Union[typing.Any, dict] = request.json -# dataset_versions = model.Version.from_data(data_obj, data) # model.db.session.commit() # 
return dataset_versions.to_dict() -# -# @api.route("/study//dataset/ -# /version//dataset-metadata") -# class VersionStudyMetadataResource(Resource): -# @api.response(201, "Success") -# @api.response(400, "Validation Error") -# @api.doc("version dataset metadata get") -# def get(self, study_id: int, dataset_id: int, version_id): -# study = model.Study.query.get(study_id) -# if not is_granted("dataset", study): -# return "Access denied, you can not modify", 403 -# version = dataset.dataset_version.get(version_id) -# pass +@api.route("/study//dataset//version//study-metadata") +class VersionDatasetMetadataResource(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version study metadata get") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return version.dataset.study.to_dict_study_metadata() + + +@api.route( + "/study//dataset//version//dataset-metadata" +) +class VersionStudyMetadataResource(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version dataset metadata get") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return version.dataset.to_dict_dataset_metadata() diff --git a/apis/dataset_metadata/dataset_readme.py b/apis/dataset_metadata/dataset_readme.py deleted file mode 100644 index edd1e0ad..00000000 --- a/apis/dataset_metadata/dataset_readme.py +++ /dev/null @@ -1,33 +0,0 @@ -from flask import request -from flask_restx import Resource, fields - -import model -from 
apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_readme = api.model( - "DatasetReadme", - {"id": fields.String(required=True), "content": fields.String(required=True)}, -) - - -@api.route("/study//dataset//metadata/readme") -class DatasetReadmeResource(Resource): - @api.doc("readme") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_readme) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - dataset_ = model.Dataset.query.get(dataset_id) - dataset_readme_ = dataset_.dataset_readme - return dataset_readme_.to_dict() - - def put(self, study_id: int, dataset_id: int): - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_readme.update(data) - model.db.session.commit() - return dataset_.dataset_readme.to_dict() diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index df5bca5b..da3defdd 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -160,7 +160,6 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", - "MainTitle", ], }, }, diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index b3d245c8..23793e7e 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -63,7 +63,6 @@ def post(self, study_id: int, dataset_id: int): "Subtitle", "TranslatedTitle", "OtherTitle", - "MainTitle", ], }, }, @@ -83,17 +82,12 @@ def post(self, study_id: int, dataset_id: int): for i in data: if "id" in i and i["id"]: dataset_title_ = model.DatasetTitle.query.get(i["id"]) - # if dataset_title_.type == 
"MainTitle": - # return ( - # "Main Title type can not be modified", - # 403, - # dataset_title_.update(i) list_of_elements.append(dataset_title_.to_dict()) elif "id" not in i or not i["id"]: if i["type"] == "MainTitle": return ( - "MainTitle type can not be given", + "Main Title type can not be given", 403, ) dataset_title_ = model.DatasetTitle.from_data(data_obj, i) @@ -125,7 +119,7 @@ def delete( dataset_title_ = model.DatasetTitle.query.get(title_id) if dataset_title_.type == "MainTitle": return ( - "MainTitle type can not be deleted", + "Main Title type can not be deleted", 403, ) model.db.session.delete(dataset_title_) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 0b8fd32c..ebadf2e6 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -13,7 +13,6 @@ study_design = api.model( "StudyDesign", { - "id": fields.String(required=True), "design_allocation": fields.String(required=True), "study_type": fields.String(required=True), "design_intervention_model": fields.String(required=True), @@ -74,7 +73,7 @@ def put(self, study_id: int): "type": ["string", "null"], }, "design_who_masked_list": { - "type": "array", + "type": ["array", "null"], "items": { "type": "string", "oneOf": [ @@ -91,7 +90,7 @@ def put(self, study_id: int): "uniqueItems": True, }, "phase_list": { - "type": "array", + "type": ["array", "null"], "items": { "type": "string", "oneOf": [ @@ -111,9 +110,9 @@ def put(self, study_id: int): }, "uniqueItems": True, }, - "enrollment_count": {"type": "integer"}, + "enrollment_count": {"type": ["integer", "null"]}, "enrollment_type": { - "type": "string", + "type": ["string", "null"], "enum": ["Actual", "Anticipated"], }, "number_arms": {"type": ["integer", "null"]}, @@ -128,7 +127,7 @@ def put(self, study_id: int): "Case-Control", "Case-Only", "Case-Crossover", - "Ecologic or Community Study", + "Ecologic or Community", "Family-Based", "Other", ] @@ -155,12 +154,8 @@ def 
put(self, study_id: int): "uniqueItems": True, }, "bio_spec_retention": {"type": ["string", "null"]}, - "bio_spec_description": { - "type": ["string", "null"], - }, - "target_duration": { - "type": ["string", "null"], - }, + "bio_spec_description": {"type": ["string", "null"]}, + "target_duration": {"type": ["string", "null"]}, "number_groups_cohorts": {"type": ["integer", "null"]}, }, } diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 9030641c..fd04de4a 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -91,8 +91,7 @@ def get(self, study_id: int): study_ = model.Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc - - return study_oversight_has_dmc + return {"oversight": study_oversight_has_dmc} def put(self, study_id: int): """Update study oversight metadata""" diff --git a/model/__init__.py b/model/__init__.py index 5b91a29a..77f8ffeb 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -18,7 +18,6 @@ from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder from .dataset_metadata.dataset_other import DatasetOther -from .dataset_metadata.dataset_readme import DatasetReadme from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights from .dataset_metadata.dataset_subject import DatasetSubject @@ -71,7 +70,6 @@ "DatasetFunder", "DatasetAlternateIdentifier", "DatasetRights", - "DatasetReadme", "DatasetRecordKeys", "DatasetTitle", "DatasetSubject", diff --git a/model/dataset.py b/model/dataset.py index 98cdc078..96bc4d41 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -20,7 +20,6 @@ def __init__(self, study): self.dataset_record_keys = model.DatasetRecordKeys(self) self.dataset_de_ident_level = model.DatasetDeIdentLevel(self) self.dataset_consent = model.DatasetConsent(self) - self.dataset_readme = 
model.DatasetReadme(self) self.dataset_other = model.DatasetOther(self) self.dataset_title.append(model.DatasetTitle(self)) @@ -89,9 +88,6 @@ def __init__(self, study): dataset_other = db.relationship( "DatasetOther", back_populates="dataset", uselist=False, cascade="all, delete" ) - dataset_readme = db.relationship( - "DatasetReadme", back_populates="dataset", uselist=False, cascade="all, delete" - ) dataset_record_keys = db.relationship( "DatasetRecordKeys", back_populates="dataset", @@ -113,14 +109,61 @@ def __init__(self, study): def to_dict(self): last_published = self.last_published() - # last_modified = self.last_modified() - return { "id": self.id, "created_at": self.created_at, # "dataset_versions": [i.to_dict() for i in self.dataset_versions], "latest_version": last_published.id if last_published else None, - # "title": self.dataset_title.title if self.dataset_title else "" + "title": [ + i.title if i.title else None for i in self.dataset_title # type: ignore + ][0], + "description": [ + i.description if i.type == "Abstract" else None + for i in self.dataset_description # type: ignore + ][0], + } + + def to_dict_dataset_metadata(self): + return { + "contributors": [ + i.to_dict_metadata() + for i in self.dataset_contributors # type: ignore + if not i.creator + ], + "about": self.dataset_other.to_dict_metadata(), + "publisher": self.dataset_other.to_dict_publisher(), # type: ignore + "access": self.dataset_access.to_dict_metadata(), + "consent": self.dataset_consent.to_dict_metadata(), + "dates": [i.to_dict_metadata() for i in self.dataset_date], # type: ignore + "de_identification": self.dataset_de_ident_level.to_dict_metadata(), + "descriptions": [ + i.to_dict_metadata() for i in self.dataset_description # type: ignore + ], + "funders": [ + i.to_dict_metadata() for i in self.dataset_funder # type: ignore + ], + "identifiers": [ + i.to_dict_metadata() + for i in self.dataset_alternate_identifier # type: ignore + ], + "creators": [ + i.to_dict_metadata() + 
for i in self.dataset_contributors # type: ignore + if i.creator + ], + "record_keys": self.dataset_record_keys.to_dict_metadata(), + "related_items": [ + i.to_dict_metadata() for i in self.dataset_related_item # type: ignore + ], + "rights": [ + i.to_dict_metadata() for i in self.dataset_rights # type: ignore + ], + "subjects": [ + i.to_dict_metadata() for i in self.dataset_subject # type: ignore + ], + "titles": [ + i.to_dict_metadata() for i in self.dataset_title # type: ignore + ], } def last_published(self): diff --git a/model/dataset_metadata/dataset_access.py b/model/dataset_metadata/dataset_access.py index 577a5e14..7ef72ffb 100644 --- a/model/dataset_metadata/dataset_access.py +++ b/model/dataset_metadata/dataset_access.py @@ -30,6 +30,12 @@ def to_dict(self): "url": self.url, } + def to_dict_metadata(self): + return { + "type": self.type, + "description": self.description, + } + @staticmethod def from_data(dataset: Dataset, data: dict): dataset_access = DatasetAccess(dataset) diff --git a/model/dataset_metadata/dataset_alternate_identifier.py b/model/dataset_metadata/dataset_alternate_identifier.py index 1b2ebb70..7eba2e98 100644 --- a/model/dataset_metadata/dataset_alternate_identifier.py +++ b/model/dataset_metadata/dataset_alternate_identifier.py @@ -28,6 +28,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "type": self.type, + "identifier": self.identifier, + } + @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetAlternateIdentifier(dataset) diff --git a/model/dataset_metadata/dataset_consent.py b/model/dataset_metadata/dataset_consent.py index 2258ea5d..e7ea1cd3 100644 --- a/model/dataset_metadata/dataset_consent.py +++ b/model/dataset_metadata/dataset_consent.py @@ -38,6 +38,13 @@ def to_dict(self): "details": self.details, } + def to_dict_metadata(self): + return { + "noncommercial": self.noncommercial, + "geog_restrict": self.geog_restrict, + "research_type": 
self.research_type, + } + @staticmethod def from_data(dataset, data: dict): dataset_consent = DatasetConsent(dataset) diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py index 6d4f54d7..2e14d02a 100644 --- a/model/dataset_metadata/dataset_contributor.py +++ b/model/dataset_metadata/dataset_contributor.py @@ -40,6 +40,15 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "name": self.name, + "name_type": self.name_identifier, + "contributor_type": self.contributor_type, + "creator": self.creator, + } + @staticmethod def from_data(dataset, data: dict): dataset_contributor = DatasetContributor(dataset) diff --git a/model/dataset_metadata/dataset_date.py b/model/dataset_metadata/dataset_date.py index c58a1741..f1da513c 100644 --- a/model/dataset_metadata/dataset_date.py +++ b/model/dataset_metadata/dataset_date.py @@ -31,6 +31,16 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + bigint_timestamp = self.date + unix_timestamp = bigint_timestamp / 1000 + datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) + return { + "id": self.id, + "date": datetime_obj.strftime("%m-%d-%Y"), + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_date = DatasetDate(dataset) diff --git a/model/dataset_metadata/dataset_de_ident_level.py b/model/dataset_metadata/dataset_de_ident_level.py index 4d8e1cc1..b5acccc4 100644 --- a/model/dataset_metadata/dataset_de_ident_level.py +++ b/model/dataset_metadata/dataset_de_ident_level.py @@ -38,6 +38,12 @@ def to_dict(self): "details": self.details, } + def to_dict_metadata(self): + return { + "direct": self.direct, + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_de_ident_level = DatasetDeIdentLevel(dataset) diff --git a/model/dataset_metadata/dataset_description.py b/model/dataset_metadata/dataset_description.py index 
97f3a8a2..6660e1f9 100644 --- a/model/dataset_metadata/dataset_description.py +++ b/model/dataset_metadata/dataset_description.py @@ -30,6 +30,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "description": self.description, + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_description = DatasetDescription(dataset) diff --git a/model/dataset_metadata/dataset_funder.py b/model/dataset_metadata/dataset_funder.py index 90c45551..061e7d31 100644 --- a/model/dataset_metadata/dataset_funder.py +++ b/model/dataset_metadata/dataset_funder.py @@ -38,6 +38,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "name": self.name, + "identifier": self.identifier, + } + @staticmethod def from_data(dataset, data: dict): dataset_funder = DatasetFunder(dataset) diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 5a96918f..0225972b 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -44,6 +44,20 @@ def to_dict(self): "resource_type": self.resource_type, } + def to_dict_metadata(self): + return { + "language": self.language, + "size": self.size, + "resource_type": self.resource_type, + } + + def to_dict_publisher(self): + return { + "managing_organization_name": self.managing_organization_name, + "managing_organization_ror_id": self.managing_organization_ror_id, + "publisher": self.publisher, + } + @staticmethod def from_data(dataset, data: dict): dataset_other = DatasetOther(dataset) diff --git a/model/dataset_metadata/dataset_readme.py b/model/dataset_metadata/dataset_readme.py deleted file mode 100644 index 7fa75b3a..00000000 --- a/model/dataset_metadata/dataset_readme.py +++ /dev/null @@ -1,31 +0,0 @@ -from ..db import db - - -class DatasetReadme(db.Model): # type: ignore - def __init__(self, dataset): - self.dataset 
= dataset - self.content = "" - - __tablename__ = "dataset_readme" - content = db.Column(db.String, nullable=False) - - dataset_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False - ) - dataset = db.relationship("Dataset", back_populates="dataset_readme") - - def to_dict(self): - return { - "id": self.dataset_id, - "content": self.content, - } - - @staticmethod - def from_data(dataset, data: dict): - dataset_readme = DatasetReadme(dataset) - dataset_readme.update(data) - return dataset_readme - - def update(self, data: dict): - self.content = data["content"] - self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py index 993af3f2..9f2d9b94 100644 --- a/model/dataset_metadata/dataset_record_keys.py +++ b/model/dataset_metadata/dataset_record_keys.py @@ -17,6 +17,12 @@ def __init__(self, dataset): dataset = db.relationship("Dataset", back_populates="dataset_record_keys") def to_dict(self): + return { + "type": self.key_type, + "details": self.key_details, + } + + def to_dict_metadata(self): return { "key_type": self.key_type, "key_details": self.key_details, diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index 579bed63..f95db200 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -52,7 +52,6 @@ def to_dict(self): key=lambda creator: creator.created_at, ) creators = [c for c in sorted_contributors if c.creator] - contributors = [c for c in sorted_contributors if not c.creator] return { "id": self.id, @@ -97,6 +96,39 @@ def to_dict(self): ], } + def to_dict_metadata(self): + bigint_timestamp = self.dataset_related_item_other.publication_year + pub_year = "" + if bigint_timestamp: + unix_timestamp = bigint_timestamp / 1000 + datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) + pub_year = 
datetime_obj.strftime("%Y") + sorted_contributors = sorted( + self.dataset_related_item_contributor, + key=lambda creator: creator.created_at, + ) + + creators = [c for c in sorted_contributors if c.creator] + contributors = [c for c in sorted_contributors if not c.creator] + return { + "type": self.type, + "titles": [ + i.to_dict_metadata() + for i in self.dataset_related_item_title # type: ignore + ], + "identifiers": [ + i.to_dict_metadata() + for i in self.dataset_related_item_identifier # type: ignore + ], + "creators": [i.to_dict_metadata() for i in creators], # type: ignore + "contributors": [ + i.to_dict_metadata() for i in contributors # type: ignore + ], + # "publication_year": self.dataset_related_item_other.publication_year, + "publication_year": pub_year if bigint_timestamp else None, + "publisher": self.dataset_related_item_other.publisher, + } + @staticmethod def from_data(dataset, data: dict): dataset_related_item = DatasetRelatedItem(dataset) diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py index b38b3651..480757cb 100644 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ b/model/dataset_metadata/dataset_related_item_contributor.py @@ -36,6 +36,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "name": self.name, + "name_type": self.name_type, + "contributor_type": self.contributor_type, + } + @staticmethod def from_data(dataset_related_item, data: dict, creator): contributor_ = DatasetRelatedItemContributor(dataset_related_item, creator) diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py index 48b2e548..63d95f4b 100644 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ b/model/dataset_metadata/dataset_related_item_identifier.py @@ -38,6 +38,13 @@ def to_dict(self): "created_at": 
self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "identifier": self.identifier, + "type": self.type, + } + @staticmethod def from_data(dataset_related_item, data: dict): identifier_ = DatasetRelatedItemIdentifier(dataset_related_item) diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py index 2d4ea08d..fab997e5 100644 --- a/model/dataset_metadata/dataset_related_item_title.py +++ b/model/dataset_metadata/dataset_related_item_title.py @@ -32,6 +32,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "title": self.title, + "type": self.type, + } + @staticmethod def from_data(dataset_related_item, data: dict): dataset_related_item_title = DatasetRelatedItemTitle(dataset_related_item) diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 27ad1a63..77103cde 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -33,6 +33,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "rights": self.rights, + "identifier": self.identifier, + } + @staticmethod def from_data(dataset, data: dict): dataset_rights = DatasetRights(dataset) diff --git a/model/dataset_metadata/dataset_subject.py b/model/dataset_metadata/dataset_subject.py index 43ea560e..0e6d5792 100644 --- a/model/dataset_metadata/dataset_subject.py +++ b/model/dataset_metadata/dataset_subject.py @@ -35,6 +35,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "subject": self.subject, + "scheme": self.scheme, + } + @staticmethod def from_data(dataset, data: dict): dataset_subject = DatasetSubject(dataset) diff --git a/model/dataset_metadata/dataset_title.py b/model/dataset_metadata/dataset_title.py index eff54c9f..f8426471 100644 --- 
a/model/dataset_metadata/dataset_title.py +++ b/model/dataset_metadata/dataset_title.py @@ -30,6 +30,13 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + return { + "id": self.id, + "title": self.title, + "type": self.type, + } + @staticmethod def from_data(dataset, data: dict): dataset_title = DatasetTitle(dataset) diff --git a/model/study.py b/model/study.py index a919bbae..667b6f6f 100644 --- a/model/study.py +++ b/model/study.py @@ -172,6 +172,53 @@ def to_dict(self): "role": contributor_permission.to_dict()["role"], } + def to_dict_study_metadata(self): + # self.study_contact: Iterable = [] + primary = [ + i.to_dict_metadata() + for i in self.study_identification # type: ignore + if not i.secondary + ] + + return { + "arms": [i.to_dict_metadata() for i in self.study_arm], # type: ignore + "available_ipd": [ + i.to_dict_metadata() for i in self.study_available_ipd # type: ignore + ], + "contacts": [ + i.to_dict_metadata() for i in self.study_contact # type: ignore + ], + "description": self.study_description.to_dict_metadata(), + "design": self.study_design.to_dict(), + "eligibility": self.study_eligibility.to_dict_metadata(), + "primary_identifier": primary[0] if len(primary) else None, + "secondary_identifiers": [ + i.to_dict_metadata() + for i in self.study_identification # type: ignore + if i.secondary + ], + "interventions": [ + i.to_dict_metadata() for i in self.study_intervention # type: ignore + ], + "ipd_sharing": self.study_ipdsharing.to_dict_metadata(), + "links": [i.to_dict_metadata() for i in self.study_link], # type: ignore + "locations": [ + i.to_dict_metadata() for i in self.study_location # type: ignore + ], + "overall_officials": [ + i.to_dict_metadata() + for i in self.study_overall_official # type: ignore + ], + "references": [ + i.to_dict_metadata() for i in self.study_reference # type: ignore + ], + "sponsors": self.study_sponsors_collaborators.to_dict_metadata(), + "collaborators": 
self.study_sponsors_collaborators.collaborator_name, + "status": self.study_status.to_dict_metadata(), + "oversight": self.study_other.oversight_has_dmc, + "conditions": self.study_other.conditions, + } + @staticmethod def from_data(data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index 01a9eed6..db4609cf 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -43,6 +43,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "label": self.label, + "description": self.description, + } + @staticmethod def from_data(study: model.Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py index cd65d626..5ae9913c 100644 --- a/model/study_metadata/study_available_ipd.py +++ b/model/study_metadata/study_available_ipd.py @@ -40,6 +40,10 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return {"identifier": self.identifier, "url": self.url} + @staticmethod def from_data(study: model.StudyArm, data: dict): """Creates a new study metadata from a dictionary""" diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py index 4f943e6f..d2c5c5d3 100644 --- a/model/study_metadata/study_contact.py +++ b/model/study_metadata/study_contact.py @@ -48,6 +48,16 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "name": self.name, + "affiliation": self.affiliation, + "phone": self.phone, + "email_address": self.email_address, + } + @staticmethod def from_data(study: Study, data: dict, role, central_contact): 
"""Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index be3289da..f5115882 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -32,6 +32,10 @@ def to_dict(self): "detailed_description": self.detailed_description, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return {"brief_summary": self.brief_summary} + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 752a69f4..81019b09 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -67,6 +67,17 @@ def to_dict(self): else None, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "gender": self.gender, + "gender_based": self.gender_based, + "minimum_age_value": self.minimum_age_value, + "maximum_age_value": self.maximum_age_value, + "inclusion_criteria": self.inclusion_criteria, + "exclusion_criteria": self.exclusion_criteria, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index f6373bf1..299f20bd 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -41,6 +41,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "identifier": self.identifier, + "identifier_type": self.identifier_type, + "id": self.id, + } + @staticmethod def from_data(study: Study, data: dict, secondary): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_intervention.py 
b/model/study_metadata/study_intervention.py index 0230b832..bb946cd2 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -45,6 +45,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "type": self.type, + "name": self.name, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py index 9d7868bc..9152959c 100644 --- a/model/study_metadata/study_ipdsharing.py +++ b/model/study_metadata/study_ipdsharing.py @@ -46,6 +46,13 @@ def to_dict(self): "ipd_sharing_url": self.ipd_sharing_url, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "ipd_sharing": self.ipd_sharing, + "ipd_sharing_info_type_list": self.ipd_sharing_info_type_list, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py index 946704d2..aa3ba44e 100644 --- a/model/study_metadata/study_link.py +++ b/model/study_metadata/study_link.py @@ -36,6 +36,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "title": self.title, + "url": self.url, + "id": self.id, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 06e640d6..27ac6476 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -44,6 +44,15 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" 
+ return { + "id": self.id, + "facility": self.facility, + "city": self.city, + "country": self.country, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index f66758e9..237d5b98 100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -41,6 +41,13 @@ def to_dict(self): "size": self.size, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "oversight_has_dmc": self.oversight_has_dmc, + "conditions": self.conditions, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index e173cd0b..069f3099 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -38,6 +38,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "name": self.name, + "role": self.role, + "affiliation": self.affiliation, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_reference.py index 113af7b0..cce05886 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_reference.py @@ -38,6 +38,14 @@ def to_dict(self): "created_at": self.created_at, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "identifier": self.identifier, + "citation": self.citation, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_sponsors_collaborators.py 
b/model/study_metadata/study_sponsors_collaborators.py index a8ab84f5..9f916611 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -45,6 +45,14 @@ def to_dict(self): "lead_sponsor_name": self.lead_sponsor_name, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "responsible_party_type": self.responsible_party_type, + "responsible_party_investigator_name": self.responsible_party_investigator_name, + "lead_sponsor_name": self.lead_sponsor_name, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 24920e32..dd1ffe78 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -43,6 +43,13 @@ def to_dict(self): "completion_date_type": self.completion_date_type, } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "overall_status": self.overall_status, + "start_date": self.start_date, + } + @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" diff --git a/model/version.py b/model/version.py index 9cff0264..3aa3bb5c 100644 --- a/model/version.py +++ b/model/version.py @@ -4,6 +4,7 @@ from sqlalchemy import Table +import model from model.dataset import Dataset from .db import db @@ -21,6 +22,7 @@ def __init__(self, dataset): self.dataset = dataset self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.version_readme = model.VersionReadme(self) __tablename__ = "version" id = db.Column(db.CHAR(36), primary_key=True) diff --git a/model/version_readme.py b/model/version_readme.py index ec657f54..c04bd1b5 100644 --- a/model/version_readme.py +++ b/model/version_readme.py @@ -2,6 +2,10 @@ class VersionReadme(db.Model): # type: ignore + def 
__init__(self, version): + self.version = version + self.content = "" + __tablename__ = "version_readme" content = db.Column(db.String, nullable=True) @@ -12,14 +16,14 @@ class VersionReadme(db.Model): # type: ignore def to_dict(self): return { - "content": self.content, + "readme": self.content, } @staticmethod - def from_data(data: dict): - user = VersionReadme() - user.update(data) - return user + def from_data(version, data: dict): + readme = VersionReadme(version) + readme.update(data) + return readme def update(self, data: dict): - self.content = data["content"] + self.content = data["readme"] diff --git a/poetry.lock b/poetry.lock index e322ea82..b9e4bb35 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,26 @@ # This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +[[package]] +name = "alembic" +version = "1.12.1" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, + {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} +importlib-resources = {version = "*", markers = "python_version < \"3.9\""} +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["python-dateutil"] + [[package]] name = "aniso8601" version = "9.0.1" @@ -658,51 +679,6 @@ docopt = ">=0.6" minilog = ">=2.0" requests = ">=2.28,<3.0" -[[package]] -name = "cryptography" -version = "41.0.5" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] -sdist = ["build"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", 
"pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] - [[package]] name = "debugpy" version = "1.6.7" @@ -1673,19 +1649,6 @@ files = [ {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, ] -[[package]] -name = "jwt" -version = "1.3.1" -description = "JSON Web Token library for Python 3." -optional = false -python-versions = ">= 3.6" -files = [ - {file = "jwt-1.3.1-py3-none-any.whl", hash = "sha256:61c9170f92e736b530655e75374681d4fcca9cfa8763ab42be57353b2b203494"}, -] - -[package.dependencies] -cryptography = ">=3.1,<3.4.0 || >3.4.0" - [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -1731,6 +1694,25 @@ files = [ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, ] +[[package]] +name = "mako" +version = "1.3.0" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, + {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "markdown" version = "3.3.7" @@ -2660,6 +2642,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2667,8 +2650,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2685,6 +2675,7 @@ files = [ {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2692,6 +2683,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = 
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3266,7 +3258,6 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" -category = "dev" optional = false python-versions = "*" files = [ @@ -3578,4 +3569,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "e3e1b2d0645e5cd7ad0281091d65e85b411eab0ddd7c475762e908bf9c10bdb4" +content-hash = "6baf6175b51ec48a4653b31437eb4d02ff6385d8f973566192dbd31cb9c8c586" diff --git a/pyproject.toml b/pyproject.toml index 2dee3a88..60636010 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ python-dotenv = "^1.0.0" flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" email-validator = "^2.0.0.post2" +alembic = "^1.12.1" [tool.poetry.group.dev.dependencies] diff --git a/sql/init.sql b/sql/init.sql index bd8d6c79..1127e19a 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -243,19 +243,7 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme -CREATE TABLE IF NOT EXISTS "dataset_readme" ( - "id" CHAR(36) NOT NULL, - "content" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NOT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); --- Dumping data for table public.dataset_readme: 1 rows -/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; -INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES - ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS 
"dataset_record_keys" ( diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 9edd316b..cfcb1cb1 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -297,19 +297,9 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; -- Dumping structure for table public.dataset_readme -CREATE TABLE IF NOT EXISTS "dataset_readme" ( - "id" CHAR(36) NOT NULL, - "content" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NOT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_readme_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); -- Dumping data for table public.dataset_readme: -1 rows /*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; -INSERT INTO "dataset_readme" ("id", "content", "dataset_id") VALUES - ('6473a133-af27-4b6c-a8a0-3fc850d3ab91', 'none', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_readme" ENABLE KEYS */; -- Dumping structure for table public.dataset_record_keys CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 50bc66b3..13099053 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -50,6 +50,23 @@ def test_put_dataset_access_metadata(_logged_in_client): # ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # +def test_get_alternative_identifier(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset alternative identifier content + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response 
= _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + + assert response.status_code == 200 + + def test_post_alternative_identifier(_logged_in_client): """ Given a Flask application configured for testing and a study ID @@ -65,7 +82,7 @@ def test_post_alternative_identifier(_logged_in_client): json=[ { "identifier": "identifier test", - "type": "ark", + "type": "ARK", } ], ) @@ -75,24 +92,7 @@ def test_post_alternative_identifier(_logged_in_client): pytest.global_alternative_identifier_id = response_data[0]["id"] assert response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ark" - - -def test_get_alternative_identifier(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset alternative identifier content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - - assert response.status_code == 200 + assert response_data[0]["type"] == "ARK" def test_delete_alternative_identifier(_logged_in_client): @@ -685,49 +685,6 @@ def test_put_dataset_publisher_metadata(_logged_in_client): ) -# ------------------- README METADATA ------------------- # -def test_get_dataset_readme_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - readme metadata content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( 
- f"/study/{study_id}/dataset/{dataset_id}/metadata/readme" - ) - - assert response.status_code == 200 - - -def test_put_dataset_readme_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/readme' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - readme metadata content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/readme", - json={ - "content": "This is the readme content", - }, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["content"] == "This is the readme content" - - # ------------------- RECORD KEYS METADATA ------------------- # def test_get_dataset_record_keys_metadata(_logged_in_client): """ @@ -766,8 +723,8 @@ def test_put_dataset_record_keys_metadata(_logged_in_client): assert response.status_code == 201 response_data = json.loads(response.data) - assert response_data["key_type"] == "Record Type" - assert response_data["key_details"] == "Details for Record Keys" + assert response_data["type"] == "Record Type" + assert response_data["details"] == "Details for Record Keys" # ------------------- RELATED ITEM METADATA ------------------- # @@ -792,7 +749,7 @@ def test_get_dataset_related_item_metadata(_logged_in_client): def test_post_dataset_related_item_metadata(_logged_in_client): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset' + When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-item' endpoint is requested (POST) Then check that the response is valid and creates the dataset related item metadata content @@ -820,7 +777,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): 
"metadata_scheme": "Metadata Scheme", "scheme_type": "Scheme Type", "scheme_uri": "Scheme URI", - "type": "ark", + "type": "ARK", } ], "issue": "Issue", @@ -862,7 +819,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["identifiers"][0]["type"] == "ark" + assert response_data[0]["identifiers"][0]["type"] == "ARK" assert response_data[0]["issue"] == "Issue" assert response_data[0]["last_page"] == "Last Page" assert response_data[0]["number_type"] == "Number Type" From 5e96623609bebc22a57d804b151b07961eadac6b Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 10 Nov 2023 16:22:55 -0800 Subject: [PATCH 355/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20update=20dockerfil?= =?UTF-8?q?e?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Dockerfile | 10 +++++++--- entrypoint.sh | 5 +++++ prod-docker-compose.yaml | 5 ----- 3 files changed, 12 insertions(+), 8 deletions(-) create mode 100644 entrypoint.sh diff --git a/Dockerfile b/Dockerfile index 7be75a98..bfa82a33 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,7 +22,11 @@ COPY core ./core COPY app.py . COPY config.py . -# run database migrations -RUN alembic upgrade head +COPY alembic ./alembic +COPY alembic.ini . -CMD ["flask", "run", "--host=0.0.0.0", "--port=5000"] \ No newline at end of file +COPY entrypoint.sh . 
+ +RUN chmod +x entrypoint.sh + +ENTRYPOINT ["./entrypoint.sh"] \ No newline at end of file diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100644 index 00000000..3b8006dc --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,5 @@ +#!/bin/sh + +alembic upgrade head + +flask run --host=0.0.0.0 --port=5000 \ No newline at end of file diff --git a/prod-docker-compose.yaml b/prod-docker-compose.yaml index 16667e39..65023a44 100644 --- a/prod-docker-compose.yaml +++ b/prod-docker-compose.yaml @@ -4,11 +4,6 @@ services: build: context: . dockerfile: Dockerfile - entrypoint: - - flask - - run - - --host=0.0.0.0 - - --port=5000 ports: - 5000:5000 environment: From 61629ef351528161fc845a6b110cca73aba580b7 Mon Sep 17 00:00:00 2001 From: aydawka Date: Mon, 13 Nov 2023 09:35:21 -0800 Subject: [PATCH 356/505] fix: minor fix on model classes for minimized version pages --- model/study_metadata/study_design.py | 2 +- model/study_metadata/study_sponsors_collaborators.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 585bf1f6..f4d33679 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -19,7 +19,7 @@ def __init__(self, study: Study): self.design_primary_purpose = None self.design_masking = None self.design_masking_description = None - self.design_who_masked_list = [] + self.design_who_masked_list = None self.phase_list = None self.enrollment_count = None self.enrollment_type = None diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index 9f916611..d06817bf 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -51,6 +51,7 @@ def to_dict_metadata(self): "responsible_party_type": self.responsible_party_type, "responsible_party_investigator_name": self.responsible_party_investigator_name, 
"lead_sponsor_name": self.lead_sponsor_name, + "collaborator_name": self.collaborator_name } @staticmethod From 8edf54ff344d4543d07ee831c425bef4a4933c26 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Mon, 13 Nov 2023 17:36:05 +0000 Subject: [PATCH 357/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- model/study_metadata/study_sponsors_collaborators.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index d06817bf..f82dc725 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -51,7 +51,7 @@ def to_dict_metadata(self): "responsible_party_type": self.responsible_party_type, "responsible_party_investigator_name": self.responsible_party_investigator_name, "lead_sponsor_name": self.lead_sponsor_name, - "collaborator_name": self.collaborator_name + "collaborator_name": self.collaborator_name, } @staticmethod From 6d63c5aa5bdd70d7aa06702458f5f9b401df61fb Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 13 Nov 2023 16:17:33 -0800 Subject: [PATCH 358/505] wip: caching & dashboard done; cleanup in progress --- apis/__init__.py | 40 +- apis/authentication.py | 11 +- apis/dashboard.py | 137 +++- apis/redcap.py | 12 +- apis/redcap_data/__init__.py | 9 - apis/redcap_data/redcap_project_data.py | 105 --- .../redcap_report_participant_values_data.py | 164 ----- .../redcap_report_participants_data.py | 65 -- .../redcap_report_repeat_surveys_data.py | 71 -- .../redcap_report_survey_completions_data.py | 183 ------ apis/redcap_data_namespace.py | 3 - app.py | 13 +- caching/__init__.py | 9 +- caching/cache.py | 46 -- config.py | 6 +- dev-docker-compose.yaml | 2 + model/study_redcap_project_dashboard.py | 5 +- modules/etl/config/__init__.py | 6 
+- modules/etl/config/aireadi_config.py | 605 ++++++++++++------ modules/etl/transforms/module_transform.py | 35 +- modules/etl/transforms/redcap_transform.py | 60 +- modules/etl/vtypes/timeseries.py | 6 +- modules/etl/vtypes/vtype.py | 5 +- notebooks/azure-blob.ipynb | 316 ++++----- poetry.lock | 32 +- pyproject.toml | 1 + 26 files changed, 794 insertions(+), 1153 deletions(-) delete mode 100644 apis/redcap_data/__init__.py delete mode 100644 apis/redcap_data/redcap_project_data.py delete mode 100644 apis/redcap_data/redcap_report_participant_values_data.py delete mode 100644 apis/redcap_data/redcap_report_participants_data.py delete mode 100644 apis/redcap_data/redcap_report_repeat_surveys_data.py delete mode 100644 apis/redcap_data/redcap_report_survey_completions_data.py delete mode 100644 apis/redcap_data_namespace.py delete mode 100644 caching/cache.py diff --git a/apis/__init__.py b/apis/__init__.py index 7881ac5d..2b8fbc42 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -2,7 +2,6 @@ from flask_restx import Api, Resource from apis.dataset_metadata_namespace import api as dataset_metadata_namespace -from apis.redcap_data_namespace import api as redcap_data_namespace from apis.study_metadata_namespace import api as study_metadata_namespace from .authentication import api as authentication @@ -27,19 +26,6 @@ from .file import api as file_api from .participant import api as participants_api from .redcap import api as redcap -from .redcap_data.redcap_project_data import api as redcap_project_data -from .redcap_data.redcap_report_participant_values_data import ( - api as redcap_report_participants_values_data, -) -from .redcap_data.redcap_report_participants_data import ( - api as redcap_report_participants_data, -) -from .redcap_data.redcap_report_repeat_surveys_data import ( - api as redcap_report_repeat_surveys_data, -) -from .redcap_data.redcap_report_survey_completions_data import ( - api as redcap_report_survey_completions_data, -) from .study import 
api as study_api from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd @@ -68,7 +54,6 @@ __all__ = [ "dataset_metadata_namespace", "study_metadata_namespace", - "redcap_data_namespace", "authentication", "contributors_api", "dataset_api", @@ -109,18 +94,19 @@ "dataset_contributor", "redcap", "dashboard", - "redcap_project_data", - "redcap_report_participants_data", - "redcap_report_participants_values_data", - "redcap_report_repeat_surveys_data", - "redcap_report_survey_completions_data", ] - api.add_namespace(dataset_metadata_namespace) api.add_namespace(study_metadata_namespace) -api.add_namespace(redcap_data_namespace) api.add_namespace(authentication) +api.add_namespace(study_api) +api.add_namespace(file_api) +api.add_namespace(dataset_api) +api.add_namespace(participants_api) +api.add_namespace(contributors_api) +api.add_namespace(user) +api.add_namespace(redcap) +api.add_namespace(dashboard) @api.route("/echo", endpoint="echo") @@ -133,13 +119,3 @@ def get(self): """Returns a simple 'Server Active' message""" return "Server active!" 
- - -api.add_namespace(study_api) -api.add_namespace(file_api) -api.add_namespace(dataset_api) -api.add_namespace(participants_api) -api.add_namespace(contributors_api) -api.add_namespace(user) -api.add_namespace(redcap) -api.add_namespace(dashboard) diff --git a/apis/authentication.py b/apis/authentication.py index c5d01d98..9394961e 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -134,8 +134,17 @@ def validate_password(instance): format_checker.checks("password")(validate_password) try: - validate(instance=data, schema=schema, format_checker=format_checker) + # Remove the code property for dev purposes + data_no_code = { + "email_address": data["email_address"], + "password": data["password"], + } + validate( + instance=data_no_code, schema=schema, format_checker=format_checker + ) + # validate(instance=data, schema=schema, format_checker=format_checker) except ValidationError as e: + print(e) return e.message, 400 user = model.User.query.filter_by( diff --git a/apis/dashboard.py b/apis/dashboard.py index 19dd052e..c9a3e92e 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -2,21 +2,87 @@ from typing import Any, Union from flask import request + +# from flask_caching import Cache from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate import model +from caching import cache +from modules.etl import ModuleTransform, RedcapTransform +from modules.etl.config import transformConfigs from .authentication import is_granted api = Namespace("Dashboard", description="Dashboard operations", path="/") +datum_model = api.model( + "Datum", + { + "filterby": fields.String( + required=True, readonly=True, description="Filterby field" + ), + "group": fields.String(required=True, readonly=True, description="Group field"), + "color": fields.String(required=True, readonly=True, description="Color field"), + "subgroup": fields.String( + required=False, readonly=True, description="Subgroup field" + ), + 
"value": fields.Integer( + required=False, readonly=True, description="Value field" + ), + "x": fields.Float(required=False, readonly=True, description="X-axis field"), + "y": fields.Float(required=False, readonly=True, description="Y-axis field"), + "datetime": fields.String( + required=False, readonly=True, description="Date field" + ), + }, +) + +visualization_model = api.model( + "Visualization", + { + "id": fields.String( + required=True, readonly=True, description="Visualization ID" + ), + "data": fields.List( + fields.Nested(datum_model), + required=True, + readonly=True, + description="Visualization data", + ), + }, +) + +redcap_project_dashboard_module_model = api.model( + "RedcapProjectDashboardModule", + { + "name": fields.String( + required=True, readonly=True, description="Dashboard module name" + ), + "id": fields.String( + required=True, readonly=True, description="Dashboard module id" + ), + "reportId": fields.String( + required=True, + readonly=True, + description="Dashboard module associated REDCap report ID", + ), + "selected": fields.Boolean( + required=True, readonly=True, description="Dashboard module is selected" + ), + "visualizations": fields.List( + fields.Nested(visualization_model), + required=True, + readonly=True, + description="Visualizations", + ), + }, +) redcap_project_dashboard_model = api.model( "RedcapProjectDashboard", { - "study_id": fields.String(required=True, description="Study ID"), "project_id": fields.String( - required=True, description="REDCap project ID (pid)" + required=True, readonly=True, description="REDCap project ID (pid)" ), "dashboard_id": fields.String( required=True, readonly=True, description="REDCap dashboard ID" @@ -24,19 +90,28 @@ "dashboard_name": fields.String( required=True, readonly=True, description="REDCap dashboard name" ), - "dashboard_modules": fields.String( - required=True, readonly=True, description="REDCap dashboard name" - ), - "report_ids": fields.String( - required=True, readonly=True, 
description="REDCap project report IDs" + "dashboard_modules": fields.List( + fields.Nested( + redcap_project_dashboard_module_model, + required=True, + readonly=True, + description="REDCap dashboard module", + ) ), }, ) +# Parser dashboard_parser = reqparse.RequestParser() dashboard_parser.add_argument("dashboard_id", type=str, help="Dashboard ID") +# Dashboard Cache Key +def dashboard_cache_key(): + print(request.get_json()) + return ",".join([f"{key}={value}" for key, value in request.get_json().items()]) + + @api.route("/study//dashboard/all") class RedcapProjectDashboards(Resource): @api.doc("redcap_project_dashboards") @@ -88,7 +163,7 @@ def post(self, study_id: int): { "type": "object", "properties": { - "key": {"type": "string", "minLength": 1}, + "id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "selected": {"type": "boolean"}, "reportId": {"type": "string", "minLength": 0}, @@ -106,8 +181,6 @@ def post(self, study_id: int): except ValidationError as e: print(e) return e.message, 400 - print("moduleslength", len(data["dashboard_modules"])) - if len(data["project_id"]) < 1: return ( f"redcap project_id is required to connect a dashboard: {data['project_id']}", @@ -123,11 +196,6 @@ def post(self, study_id: int): f"dashboard dashboard_modules is required to connect a dashboard: {data['dashboard_name']}", 400, ) - data["dashboard_modules"] = [ - dashboard_module - for dashboard_module in data["dashboard_modules"] - if dashboard_module["selected"] - ] connect_redcap_project_dashboard = model.StudyRedcapProjectDashboard.from_data( study, data ) @@ -145,6 +213,7 @@ class RedcapProjectDashboard(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) + @cache.cached(query_string=True) def get(self, study_id: int): """Get Study Redcap Project Dashboard""" study = model.db.session.query(model.Study).get(study_id) @@ -154,10 +223,30 @@ def get(self, study_id: 
int): redcap_project_dashboard = model.db.session.query( model.StudyRedcapProjectDashboard ).get(dashboard_id) + # Execute REDCap Merge Transform redcap_project_dashboard = redcap_project_dashboard.to_dict() + project_id = redcap_project_dashboard["project_id"] + redcap_project_view = ( + model.db.session.query(model.StudyRedcapProjectApi) + .get(project_id) + .to_dict() + ) + redcap_etl_config = transformConfigs["redcap"] + redcap_etl_config["redcap_api_url"] = redcap_project_view["project_api_url"] + redcap_etl_config["redcap_api_key"] = redcap_project_view["project_api_key"] + mergedTransform = RedcapTransform(redcap_etl_config).merged + # Execute Dashboard Module Transforms + for dashboard_module in redcap_project_dashboard["dashboard_modules"]: + transform, module_etl_config = transformConfigs[dashboard_module["id"]] + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": getattr(ModuleTransform(module_etl_config), transform)( + mergedTransform + ).transformed, + } + return redcap_project_dashboard, 201 - @api.doc("redcap_project_dashboard") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) @@ -171,12 +260,12 @@ def put(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ - "project_id", + "dashboard_id", "dashboard_name", "dashboard_modules", ], "properties": { - "project_id": {"type": "string", "minLength": 1}, + "dashboard_id": {"type": "string", "minLength": 36, "maxLength": 37}, "dashboard_name": {"type": "string", "minLength": 1}, "dashboard_modules": { "type": "array", @@ -185,7 +274,7 @@ def put(self, study_id: int): { "type": "object", "properties": { - "key": {"type": "string", "minLength": 1}, + "id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "selected": {"type": "boolean"}, "reportId": {"type": "string", "minLength": 0}, @@ -203,11 +292,10 @@ def put(self, study_id: int): except 
ValidationError as e: print(e) return e.message, 400 - print("moduleslength", len(data["dashboard_modules"])) - if len(data["project_id"]) < 1: + if len(data["dashboard_id"]) < 1: return ( - f"redcap project_id is required to connect a dashboard: {data['project_id']}", + f"dashboard dashboard_id is required to connect a dashboard: {data['dashboard_id']}", 400, ) if len(data["dashboard_name"]) < 1: @@ -220,11 +308,6 @@ def put(self, study_id: int): f"dashboard dashboard_modules is required to connect a dashboard: {data['dashboard_name']}", 400, ) - data["dashboard_modules"] = [ - dashboard_module - for dashboard_module in data["dashboard_modules"] - if dashboard_module["selected"] - ] update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( data["dashboard_id"] ) diff --git a/apis/redcap.py b/apis/redcap.py index 3712992a..b1ed14de 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -179,14 +179,14 @@ def put(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ - "project_title", "project_id", + "project_title", "project_api_url", "project_api_active", ], "properties": { - "project_title": {"type": "string", "minLength": 1}, "project_id": {"type": "string", "minLength": 1, "maxLength": 12}, + "project_title": {"type": "string", "minLength": 1}, "project_api_url": {"type": "string", "minLength": 1}, "project_api_active": {"type": "boolean"}, }, @@ -196,14 +196,14 @@ def put(self, study_id: int): except ValidationError as e: return e.message, 400 - if len(data["project_title"]) < 1: + if len(data["project_id"]) < 1: return ( - f"redcap project_title is required for redcap access: {data['project_title']}", + f"redcap project_id is required for redcap access: {data['project_id']}", 400, ) - if len(data["project_id"]) < 1: + if len(data["project_title"]) < 1: return ( - f"redcap project_id is required for redcap access: {data['project_id']}", + f"redcap project_title is required for redcap access: 
{data['project_title']}", 400, ) if len(data["project_api_url"]) < 1: diff --git a/apis/redcap_data/__init__.py b/apis/redcap_data/__init__.py deleted file mode 100644 index 4f12cfc0..00000000 --- a/apis/redcap_data/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .redcap_project_data import RedcapProjectDataResource -from .redcap_report_participant_values_data import ( - RedcapReportParticipantValuesDataResource, -) -from .redcap_report_participants_data import RedcapReportParticipantsDataResource -from .redcap_report_repeat_surveys_data import RedcapReportRepeatSurveysDataResource -from .redcap_report_survey_completions_data import ( - RedcapReportSurveyCompletionsDataResource, -) diff --git a/apis/redcap_data/redcap_project_data.py b/apis/redcap_data/redcap_project_data.py deleted file mode 100644 index f2cec382..00000000 --- a/apis/redcap_data/redcap_project_data.py +++ /dev/null @@ -1,105 +0,0 @@ -"""API routes for redcap project""" -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.redcap_data_namespace import api - -from ..authentication import is_granted - -# # REDCap Data Visualization ETL Configuration -# from modules.etl.config import redcapTransformConfig -# from modules.etl.config import sexGenderTransformConfig -# from modules.etl.config import raceEthnicityTransformConfig -# from modules.etl.config import phenotypeTransformConfig -# from modules.etl.config import studyWaypointsTransformConfig - -# # ETL Modules -# from modules.etl import transforms - -# Import In-Memory Cache - -redcap_project_data = api.model( - "RedcapProject", - { - "project_id": fields.String(required=True, readonly=True, description=""), - "project_title": fields.String(required=True, readonly=True, description=""), - "creation_time": fields.String(required=True, readonly=True, description=""), - "production_time": fields.String(required=True, readonly=True, 
description=""), - "in_production": fields.Boolean(r=True, description=""), - "project_language": fields.String(required=True, readonly=True, description=""), - "purpose": fields.Integer(required=True, readonly=True, description=""), - "purpose_other": fields.Integer(required=True, readonly=True, description=""), - "project_notes": fields.String(required=True, readonly=True, description=""), - "custom_record_label": fields.String( - required=True, readonly=True, description="" - ), - "secondary_unique_field": fields.String( - required=True, readonly=True, description="" - ), - "is_longitudinal": fields.Boolean(required=True, readonly=True, description=""), - "has_repeating_instruments_or_events": fields.Boolean( - required=True, readonly=True, description="" - ), - "surveys_enabled": fields.Boolean(required=True, readonly=True, description=""), - "scheduling_enabled": fields.Boolean( - required=True, readonly=True, description="" - ), - "record_autonumbering_enabled": fields.Boolean( - required=True, readonly=True, description="" - ), - "randomization_enabled": fields.Boolean( - required=True, readonly=True, description="" - ), - "ddp_enabled": fields.Boolean(required=True, readonly=True, description=""), - "project_irb_number": fields.String( - required=True, readonly=True, description="" - ), - "project_grant_number": fields.String( - required=True, readonly=True, description="" - ), - "project_pi_firstname": fields.String( - required=True, readonly=True, description="" - ), - "project_pi_lastname": fields.String( - required=True, readonly=True, description="" - ), - "display_today_now_button": fields.Boolean( - required=True, readonly=True, description="" - ), - "missing_data_codes": fields.String( - required=True, readonly=True, description="" - ), - "external_modules": fields.String(required=True, readonly=True, description=""), - "bypass_branching_erase_field_prompt": fields.Boolean( - required=True, readonly=True, description="" - ), - }, -) - - 
-@api.route("/study//redcap//project") -class RedcapProjectDataResource(Resource): - @api.doc("project") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_data) - # @cache.cached() - def get(self, study_id: int, redcap_project_id: str): - """ - Get REDCap project - - TODO: Will need to use project_id to query SQL/KeyVault to - get the correct REDCap API URL and token. For now, - we'll just assume we have access through globals. - """ - study_ = model.Study.query.get(study_id) - study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project( - study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] - ) - project = PyCapProject.export_project_info() - return project diff --git a/apis/redcap_data/redcap_report_participant_values_data.py b/apis/redcap_data/redcap_report_participant_values_data.py deleted file mode 100644 index a69cca2e..00000000 --- a/apis/redcap_data/redcap_report_participant_values_data.py +++ /dev/null @@ -1,164 +0,0 @@ -"""API routes for redcap report participant values data""" -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.redcap_data_namespace import api - -from ..authentication import is_granted - -# # REDCap Data Visualization ETL Configuration -# from modules.etl.config import redcapTransformConfig -# from modules.etl.config import sexGenderTransformConfig -# from modules.etl.config import raceEthnicityTransformConfig -# from modules.etl.config import phenotypeTransformConfig -# from modules.etl.config import studyWaypointsTransformConfig - -# # ETL Modules -# from modules.etl import transforms - - -# Import In-Memory Cache -# from __main__ import IN_MEMORY_CACHE - -redcap_report_participant_values_data = api.model( - "RedcapReportParticipantValuesData", - { - "record_id": fields.String( - required=True, readonly=True, description="Participant record ID" 
- ), - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "siteid": fields.String(required=True, readonly=True, description="Site ID"), - "dm": fields.String( - required=True, readonly=True, description="Data approved for Fairhub.io" - ), - "siteid": fields.String(required=True, readonly=True, description="Site ID"), - "genderid": fields.String( - required=True, readonly=True, description="Gender identity" - ), - "scrsex": fields.String( - required=True, readonly=True, description="Sex at birth" - ), - "race": fields.String(required=True, readonly=True, description="Race"), - "race2": fields.String( - required=True, readonly=True, description="Race further defined" - ), - "ethnic": fields.String(required=True, readonly=True, description="Ethnicity"), - "dvenvyn": fields.String( - required=True, readonly=True, description="Environmental sensor distributed" - ), - "dvenvstdat": fields.String( - required=True, - readonly=True, - description="Date of environmental sensor distribution", - ), - "dvenvcrcid": fields.String( - required=True, - readonly=True, - description="Was environmental sensor demonstrated?", - ), - "dvcgmyn": fields.String( - required=True, - readonly=True, - description="Continuous glucose monitor inserted", - ), - "dvcgmstdat": fields.String( - required=True, - readonly=True, - description="Date of continuous glucose monitor was inserted", - ), - "dvcgmvrfy": fields.String( - required=True, - readonly=True, - description="Continuous glucose monitor initialized and recording?", - ), - "dvamwyn": fields.String( - required=True, - readonly=True, - description="Was the Apple watch sent home with the participant?", - ), - "dvamwstdat": fields.String( - required=True, - readonly=True, - description="Date Apple watch was given to participant", - ), - "dvamwsn": fields.String( - required=True, readonly=True, description="Apple watch serial number" - ), - "dvrtmthd": fields.String( - required=True, 
readonly=True, description="Planned method of device return" - ), - "dvrtnyn": fields.String( - required=True, - readonly=True, - description="Was the participant given device return instructions and shipping materials?", - ), - "dvrtnship": fields.String( - required=True, readonly=True, description="Return shipping tracking number" - ), - "mhterm_dm1": fields.String( - required=True, readonly=True, description="Type I diabetes" - ), - "mhterm_dm2": fields.String( - required=True, readonly=True, description="Type II diabetes" - ), - "mhterm_predm": fields.String( - required=True, readonly=True, description="Pre-diabetes" - ), - "mh_dm_age": fields.String( - required=True, - readonly=True, - description="Age diagnosed with type II diabetes", - ), - "mh_a1c": fields.String( - required=True, readonly=True, description="Elevated A1C levels" - ), - "cmtrt_a1c": fields.String( - required=True, - readonly=True, - description="Taking pills to control A1C and blood glucose levels?", - ), - "cmtrt_insln": fields.String( - required=True, - readonly=True, - description="Injecting insulin to control blood glucose levels", - ), - "cmtrt_glcs": fields.String( - required=True, - readonly=True, - description="Using other injectables to control blood glucose levels", - ), - "cmtrt_lfst": fields.String( - required=True, - readonly=True, - description="Using lifestyle changes to control blood glucose levels", - ), - "scrcmpdat": fields.String( - required=True, readonly=True, description="Screening survey completion date" - ), - }, -) - - -@api.route("/study//redcap//participant-values") -class RedcapReportParticipantValuesDataResource(Resource): - @api.doc("report_participant_values_data") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_report_participant_values_data) - # @IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): - study_ = model.Study.query.get(study_id) - study_redcap_ = 
study_.study_redcap.to_dict() - PyCapProject = Project( - study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] - ) - participant_values = PyCapProject.export_report( - study_redcap_["redcap_report_id_participant_values"] - ) - return participant_values diff --git a/apis/redcap_data/redcap_report_participants_data.py b/apis/redcap_data/redcap_report_participants_data.py deleted file mode 100644 index eed60162..00000000 --- a/apis/redcap_data/redcap_report_participants_data.py +++ /dev/null @@ -1,65 +0,0 @@ -"""API routes for redcap report participants data data""" -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.redcap_data_namespace import api - -from ..authentication import is_granted - -# # REDCap Data Visualization ETL Configuration -# from modules.etl.config import redcapTransformConfig -# from modules.etl.config import sexGenderTransformConfig -# from modules.etl.config import raceEthnicityTransformConfig -# from modules.etl.config import phenotypeTransformConfig -# from modules.etl.config import studyWaypointsTransformConfig - -# # ETL Modules -# from modules.etl import transforms - - -# Import In-Memory Cache - - -redcap_report_participants_data = api.model( - "RedcapReportParticipantsData", - { - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "dm_inperson_data_validated": fields.Integer( - required=True, - readonly=True, - attribute="dm___i", - description="All data collected and validated through in-person visit", - ), - "dm_device_data_validated": fields.Integer( - required=True, - readonly=True, - attribute="dm___d", - description="All device data entered and validated", - ), - }, -) - - -@api.route("/study//redcap//participants") -class RedcapReportParticipantsDataResource(Resource): - @api.doc("report_participants_data") - @api.response(200, "Success") - @api.response(400, 
"Validation Error") - @api.marshal_with(redcap_report_participants_data) - # @IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): - study_ = model.Study.query.get(study_id) - study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project( - study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] - ) - participants = PyCapProject.export_report( - study_redcap_["redcap_report_id_participants"] - ) - return participants diff --git a/apis/redcap_data/redcap_report_repeat_surveys_data.py b/apis/redcap_data/redcap_report_repeat_surveys_data.py deleted file mode 100644 index 79bd9c29..00000000 --- a/apis/redcap_data/redcap_report_repeat_surveys_data.py +++ /dev/null @@ -1,71 +0,0 @@ -"""API routes for redcap report repeat surveys data""" -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.redcap_data_namespace import api - -from ..authentication import is_granted - -# # REDCap Data Visualization ETL Configuration -# from modules.etl.config import redcapTransformConfig -# from modules.etl.config import sexGenderTransformConfig -# from modules.etl.config import raceEthnicityTransformConfig -# from modules.etl.config import phenotypeTransformConfig -# from modules.etl.config import studyWaypointsTransformConfig - -# # ETL Modules -# from modules.etl import transforms - - -# Import In-Memory Cache -# from __main__ import IN_MEMORY_CACHE - -redcap_report_repeat_surveys_data = api.model( - "RedcapReportRepeatSurveysData", - { - "record_id": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "current_medications_complete": fields.String( - required=True, - readonly=True, - description="All data collected and validated through in-person visit", - ), - "redcap_repeat_instrument": 
fields.String( - required=True, - readonly=True, - description="All device data entered and validated", - ), - "redcap_repeat_instance": fields.String( - required=True, - readonly=True, - description="All device data entered and validated", - ), - }, -) - - -@api.route("/study//redcap//repeat-surveys") -class RedcapReportRepeatSurveysDataResource(Resource): - @api.doc("report_repeat_surveys_data") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_report_repeat_surveys_data) - # @IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): - study_ = model.Study.query.get(study_id) - study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project( - study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] - ) - repeat_surveys = PyCapProject.export_report( - study_redcap_["redcap_report_id_repeat_surveys"] - ) - return repeat_surveys diff --git a/apis/redcap_data/redcap_report_survey_completions_data.py b/apis/redcap_data/redcap_report_survey_completions_data.py deleted file mode 100644 index 318ba229..00000000 --- a/apis/redcap_data/redcap_report_survey_completions_data.py +++ /dev/null @@ -1,183 +0,0 @@ -"""API routes for redcap report survey completions data""" -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.redcap_data_namespace import api - -from ..authentication import is_granted - -# # REDCap Data Visualization ETL Configuration -# from modules.etl.config import redcapTransformConfig -# from modules.etl.config import sexGenderTransformConfig -# from modules.etl.config import raceEthnicityTransformConfig -# from modules.etl.config import phenotypeTransformConfig -# from modules.etl.config import studyWaypointsTransformConfig - -# # ETL Modules -# from modules.etl import transforms - - -# Import In-Memory Cache -# from __main__ import IN_MEMORY_CACHE - 
-redcap_report_survey_completions_data = api.model( - "RedcapReportSurveyCompletionsData", - { - "record_id": fields.String( - required=True, readonly=True, description="Participant record ID" - ), - "studyid": fields.String( - required=True, readonly=True, description="Study participant ID" - ), - "screening_survey_complete": fields.String( - required=True, readonly=True, description="Screening survey completed" - ), - "study_enrollment_complete": fields.String( - required=True, readonly=True, description="Study enrollment completed" - ), - "recruitment_survey_complete": fields.String( - required=True, readonly=True, description="Recruitment survey completed" - ), - "faq_survey_complete": fields.String( - required=True, readonly=True, description="FAQ survey completed" - ), - "recruitment_survey_management_complete": fields.String( - required=True, - readonly=True, - description="Recruitment survey management completed", - ), - "device_distribution_complete": fields.String( - required=True, readonly=True, description="Device distribution completed" - ), - "preconsent_survey_complete": fields.String( - required=True, readonly=True, description="Pre-consent survey completed" - ), - "consent_survey_complete": fields.String( - required=True, readonly=True, description="Consent survey completed" - ), - "staff_consent_attestation_survey_complete": fields.String( - required=True, - readonly=True, - description="Staff consent attestation survey completed", - ), - "demographics_survey_complete": fields.String( - required=True, readonly=True, description="Demographics survey completed" - ), - "health_survey_complete": fields.String( - required=True, readonly=True, description="Health survey completed" - ), - "substance_use_survey_complete": fields.String( - required=True, readonly=True, description="Substance use survey completed" - ), - "cesd10_survey_complete": fields.String( - required=True, readonly=True, description="CES-D-10 survey completed" - ), - 
"paid5_dm_survey_complete": fields.String( - required=True, readonly=True, description="PAID-5 DM survey completed" - ), - "diabetes_survey_complete": fields.String( - required=True, readonly=True, description="Diabetes survey completed" - ), - "dietary_survey_complete": fields.String( - required=True, readonly=True, description="Dietary survey completed" - ), - "ophthalmic_survey_complete": fields.String( - required=True, readonly=True, description="Opthalmic survey completed" - ), - "px_sdoh_combined_survey_complete": fields.String( - required=True, readonly=True, description="PhenX SDOH survey completed" - ), - "px_food_insecurity_survey_complete": fields.String( - required=True, - readonly=True, - description="PhenX Food Insecurity survey completed", - ), - "px_neighborhood_environment_survey_complete": fields.String( - required=True, - readonly=True, - description="PhenX Neighborhood Enviroment survey completed", - ), - "px_racial_ethnic_discrimination_survey_complete": fields.String( - required=True, - readonly=True, - description="PhenX Racial/Ethnic Discrimination survey completed", - ), - "decline_participation_survey_complete": fields.String( - required=True, - readonly=True, - description="Decline participation survey completed", - ), - "meds_assessment_complete": fields.String( - required=True, readonly=True, description="Medications assessment completed" - ), - "driving_record_complete": fields.String( - required=True, readonly=True, description="Driving record completed" - ), - "physical_assessment_complete": fields.String( - required=True, readonly=True, description="Physical assessment completed" - ), - "bcva_complete": fields.String( - required=True, readonly=True, description="BCVA completed" - ), - "photopic_mars_complete": fields.String( - required=True, readonly=True, description="Photopic mars completed" - ), - "mesopic_mars_complete": fields.String( - required=True, readonly=True, description="Mesopic mars completed" - ), - 
"monofilament_complete": fields.String( - required=True, readonly=True, description="Monofilament completed" - ), - "moca_complete": fields.String( - required=True, readonly=True, description="MOCA instrument completed" - ), - "ecg_complete": fields.String( - required=True, readonly=True, description="ECG completed" - ), - "retinal_imaging_v2_complete": fields.String( - required=True, readonly=True, description="Retinal imaging completed" - ), - "lab_results_complete": fields.String( - required=True, readonly=True, description="Lab results completed" - ), - "device_return_complete": fields.String( - required=True, readonly=True, description="Device return completed" - ), - "specimen_management_complete": fields.String( - required=True, readonly=True, description="Specimen management completed" - ), - "disposition_complete": fields.String( - required=True, - readonly=True, - description="Participant disposition completed", - ), - "data_management_complete": fields.String( - required=True, - readonly=True, - description="Fairhub.io data management completed", - ), - }, -) - - -@api.route("/study//redcap//survey-completions") -class RedcapReportSurveyCompletionsDataResource(Resource): - @api.doc("report_survey_completions_data") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_report_survey_completions_data) - # @IN_MEMORY_CACHE.cached() - def get(self, study_id: int, redcap_project_id: str): - study_ = model.Study.query.get(study_id) - study_redcap_ = study_.study_redcap.to_dict() - PyCapProject = Project( - study_redcap_["redcap_api_url"], study_redcap_["redcap_api_token"] - ) - survey_completions = PyCapProject.export_report( - study_redcap_["redcap_report_id_survey_completions"] - ) - return survey_completions diff --git a/apis/redcap_data_namespace.py b/apis/redcap_data_namespace.py deleted file mode 100644 index 35c88091..00000000 --- a/apis/redcap_data_namespace.py +++ /dev/null @@ -1,3 +0,0 @@ -from 
flask_restx import Namespace - -api = Namespace("Redcap Data", description="Redcap data caching operations", path="/") diff --git a/app.py b/app.py index ab5f277a..8d164d76 100644 --- a/app.py +++ b/app.py @@ -8,7 +8,6 @@ import jwt from flask import Flask, request from flask_bcrypt import Bcrypt -from flask_caching import Cache from flask_cors import CORS from sqlalchemy import MetaData @@ -17,6 +16,7 @@ from apis import api from apis.authentication import UnauthenticatedException, authentication, authorization from apis.exception import ValidationException +from caching import cache # from pyfairdatatools import __version__ @@ -41,8 +41,9 @@ def create_app(config_module=None): # TODO - fix this # csrf = CSRFProtect() # csrf.init_app(app) - + # print(app.config) app.config.from_prefixed_env("FAIRHUB") + if config.FAIRHUB_SECRET: if len(config.FAIRHUB_SECRET) < 32: raise RuntimeError("FAIRHUB_SECRET must be at least 32 characters long") @@ -57,14 +58,6 @@ def create_app(config_module=None): else: raise RuntimeError("FAIRHUB_DATABASE_URL not set") - cache_config = { - key: value - for key, value in app.config.items() - if (len(key) > 5) and (key[0:5] == "CACHE") - } - cache = Cache(config=cache_config) - - # Moved down here to allow for loading of redis cache prior to API model.db.init_app(app) cache.init_app(app) api.init_app(app) diff --git a/caching/__init__.py b/caching/__init__.py index 11e47a57..671f11ba 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1 +1,8 @@ -from .cache import create_cache +from flask_caching import Cache +from config import config + +cache = Cache(config = { + key.replace(f"FAIRHUB_", ""): value + for key, value in config.items() + if "CACHE" in key +}) diff --git a/caching/cache.py b/caching/cache.py deleted file mode 100644 index 1613c62a..00000000 --- a/caching/cache.py +++ /dev/null @@ -1,46 +0,0 @@ -from flask_caching import Cache - - -def create_cache(app): - cache = None - try: - app.config["CACHE_URL"] = ( - 
app.config["CACHE_URL"] - if "CACHE_URL" in app.config - else "redis://127.0.0.1:6379" - ) - app.config["CACHE_HOST"] = ( - app.config["CACHE_HOST"] if "CACHE_HOST" in app.config else "localhost" - ) - app.config["CACHE_PORT"] = ( - app.config["CACHE_PORT"] if "CACHE_PORT" in app.config else 6379 - ) - app.config["CACHE_DB"] = ( - app.config["CACHE_DB"] if "CACHE_DB" in app.config else 0 - ) - app.config["CACHE_DEFAULT_TIMEOUT"] = ( - app.config["CACHE_DEFAULT_TIMEOUT"] - if "CACHE_DEFAULT_TIMEOUT" in app.config - else 86400 - ) - app.config["CACHE_KEY_PREFIX"] = ( - app.config["CACHE_KEY_PREFIX"] - if "CACHE_KEY_PREFIX" in app.config - else "fairhub-io#" - ) - - cache = Cache( - config={ - "CACHE_TYPE": "RedisCache", - "CACHE_DEBUG": False, - "CACHE_DEFAULT_TIMEOUT": app.config["CACHE_DEFAULT_TIMEOUT"], - "CACHE_KEY_PREFIX": app.config["CACHE_KEY_PREFIX"], - "CACHE_REDIS_HOST": app.config["CACHE_HOST"], - "CACHE_REDIS_PORT": app.config["CACHE_PORT"], - "CACHE_REDIS_DB": app.config["CACHE_DB"], - "CACHE_REDIS_URL": app.config["CACHE_URL"], - } - ) - except: - raise RuntimeError("Unable to instantiate cache!") - return cache diff --git a/config.py b/config.py index 57cea136..a58920c8 100644 --- a/config.py +++ b/config.py @@ -10,18 +10,16 @@ # Load environment variables from .env config = dotenv_values(".env") - def get_env(key): """Return environment variable from .env or native environment.""" return config.get(key) if LOCAL_ENV_FILE else environ.get(key) - +FLASK_APP = get_env("FLASK_APP") +FLASK_DEBUG = get_env("FLASK_DEBUG") FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") - FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") - FAIRHUB_CACHE_DEFAULT_TIMEOUT = get_env("FAIRHUB_CACHE_DEFAULT_TIMEOUT") FAIRHUB_CACHE_KEY_PREFIX = get_env("FAIRHUB_CACHE_KEY_PREFIX") FAIRHUB_CACHE_HOST = get_env("FAIRHUB_CACHE_HOST") 
diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 6671b6cb..2879955f 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -13,6 +13,7 @@ services: # - ./app.py:/app/ # - ./config.py:/app/ environment: + FLASK_APP: api.fairhub.io FLASK_ENV: development FLASK_DEBUG: 1 FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/postgres}" @@ -46,6 +47,7 @@ services: CACHE_DB: fairhub CACHE_HOST: localhost CACHE_PORT: 6379 + CACHE_TYPE: RedisCache CACHE_URL: redis://127.0.0.1:6379 CACHE_PREFIX: fairhub-io# CACHE_TIMEOUT: 86400 diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index 14d849ef..0327ef69 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -2,8 +2,7 @@ from dataclasses import dataclass from datetime import datetime, timezone -from sqlalchemy import JSON -from sqlalchemy.dialects.postgresql import ARRAY +from sqlalchemy_json import NestedMutableJson from model import Study @@ -28,7 +27,7 @@ class StudyRedcapProjectDashboard(db.Model): # type: ignore dashboard_id: str = db.Column(db.CHAR(36), primary_key=True) dashboard_name: str = db.Column(db.String, nullable=False) dashboard_modules: list[dict[str, (str | bool | int)]] = db.Column( - ARRAY(JSON), nullable=True + NestedMutableJson, nullable=True ) created_at: int = db.Column(db.BigInteger, nullable=False) updated_on: int = db.Column(db.BigInteger, nullable=False) diff --git a/modules/etl/config/__init__.py b/modules/etl/config/__init__.py index 013db7ee..c863185c 100644 --- a/modules/etl/config/__init__.py +++ b/modules/etl/config/__init__.py @@ -2,7 +2,7 @@ redcapTransformConfig, sexGenderTransformConfig, raceEthnicityTransformConfig, - phenotypeTransformConfig, - studyWaypointsTransformConfig, - mixedTransformTestConfig, + phenotypesTransformConfig, + overviewTransformConfig, + transformConfigs, ) diff --git 
a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index cd6ebcb0..655fd249 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -1,13 +1,11 @@ -import os -import dotenv import numpy as np # Load API metadata from .env -dotenv.load_dotenv() +# dotenv.load_dotenv() -# Set REDCap API References -REDCAP_API_TOKEN = os.environ["REDCAP_API_TOKEN"] -REDCAP_API_URL = os.environ["REDCAP_API_URL"] +# # Set REDCap API References +# REDCAP_API_TOKEN = os.environ["REDCAP_API_TOKEN"] +# REDCAP_API_URL = os.environ["REDCAP_API_URL"] # Value assigned to missing values unless other specific value defined on function call # (e.g. REDCapTransform.map_missing_values_by_columns(df, columns, new_missing_value)) @@ -126,7 +124,7 @@ ], ), ( - "dashboard_data_study_waypoints", + "dashboard_data_overview", {"report_id": 251954}, [ ( @@ -156,7 +154,7 @@ "merge_transformed_reports": ( "dashboard_data_generic", [ - ("dashboard_data_study_waypoints", {"on": index_columns, "how": "inner"}), + ("dashboard_data_overview", {"on": index_columns, "how": "inner"}), ( "dashboard_data_repeat_instruments", {"on": index_columns, "how": "outer"}, @@ -278,59 +276,137 @@ ) # Phenotypes -phenotypeTransformConfig = ( - "simpleTransform", +phenotypesTransformConfig = ( + "compoundTransform", { "key": "phenotype", "strict": True, - "transforms": { - "name": "Type II Diabetes", - "vtype": "SingleCategorical", - "method": { - "groups": ["siteid", "mhterm_dm2"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, + "transforms": [ + { + "name": "Prediabetes", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "mhterm_predm"], + "value": "record_id", + "func": "count", }, - "group": { - "name": "Phenotype", - "field": "mhterm_dm2", - "missing_value": missing_value_generic, - "astype": str, + "accessors": 
{ + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Prediabetes", + "field": "mhterm_predm", + "remap": lambda x: "Yes Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Prediabetes", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Prediabetes", + "field": "mhterm_predm", + "remap": lambda x: "Yes Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Prediabetes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, }, - "color": { - "name": "Phenotype", - "field": "mhterm_dm2", - "missing_value": missing_value_generic, - "astype": str, + }, + { + "name": "Type I Diabetes", + "vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "mhterm_dm1"], + "value": "record_id", + "func": "count", }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Type I Diabetes", + "field": "mhterm_dm1", + "remap": lambda x: "Yes Type I Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Type I Diabetes", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Type I Diabetes", + "field": "mhterm_dm1", + "remap": lambda x: "Yes Type I Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Type I Diabetes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, }, }, - }, + { + "name": "Type II Diabetes", + 
"vtype": "SingleCategorical", + "method": { + "groups": ["siteid", "mhterm_dm2"], + "value": "record_id", + "func": "count", + }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Type II Diabetes", + "field": "mhterm_dm2", + "remap": lambda x: "Yes Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Type II Diabetes", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Type II Diabetes", + "field": "mhterm_dm2", + "remap": lambda x: "Yes Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Type II Diabetes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + } + ], }, ) -# Study Waypoints -studyWaypointsTransformConfig = ( +# Overview +overviewTransformConfig = ( "compoundTransform", { - "key": "study-waypoints", + "key": "overview", "strict": True, "transforms": [ { "name": "Recruitment Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "recruitment_survey_complete"], "value": "record_id", @@ -344,6 +420,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Recruitment Survey", "field": "recruitment_survey_complete", "missing_value": missing_value_generic, @@ -365,7 +448,7 @@ }, { "name": "FAQ Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "faq_survey_complete"], "value": "record_id", @@ -379,6 +462,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "FAQ Survey", + "field": 
"faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "FAQ Survey", "field": "faq_survey_complete", "missing_value": missing_value_generic, @@ -400,7 +490,7 @@ }, { "name": "Screening Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "screening_survey_complete"], "value": "record_id", @@ -414,6 +504,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Screening Survey", "field": "screening_survey_complete", "missing_value": missing_value_generic, @@ -435,7 +532,7 @@ }, { "name": "Preconsent Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "preconsent_survey_complete"], "value": "record_id", @@ -449,6 +546,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Preconsent Survey", "field": "preconsent_survey_complete", "missing_value": missing_value_generic, @@ -470,7 +574,7 @@ }, { "name": "Consent Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "consent_survey_complete"], "value": "record_id", @@ -484,6 +588,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Consent Survey", "field": "consent_survey_complete", "missing_value": missing_value_generic, @@ -505,7 +616,7 @@ }, { "name": "Staff Consent Attestation Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", 
"staff_consent_attestation_survey_complete"], "value": "record_id", @@ -519,6 +630,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Staff Consent Attestation Survey", "field": "staff_consent_attestation_survey_complete", "missing_value": missing_value_generic, @@ -540,7 +658,7 @@ }, { "name": "Demographics Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "demographics_survey_complete"], "value": "record_id", @@ -554,6 +672,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Demographics Survey", "field": "demographics_survey_complete", "missing_value": missing_value_generic, @@ -575,7 +700,7 @@ }, { "name": "Health Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "health_survey_complete"], "value": "record_id", @@ -589,6 +714,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Health Survey", "field": "health_survey_complete", "missing_value": missing_value_generic, @@ -610,7 +742,7 @@ }, { "name": "Substance Use Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "substance_use_survey_complete"], "value": "record_id", @@ -624,6 +756,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { 
"name": "Substance Use Survey", "field": "substance_use_survey_complete", "missing_value": missing_value_generic, @@ -645,7 +784,7 @@ }, { "name": "CES-D-10 Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "cesd10_survey_complete"], "value": "record_id", @@ -659,6 +798,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "CES-D-10 Survey", "field": "cesd10_survey_complete", "missing_value": missing_value_generic, @@ -680,7 +826,7 @@ }, { "name": "PAID-5 DM Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "paid5_dm_survey_complete"], "value": "record_id", @@ -694,6 +840,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "PAID-5 DM Survey", "field": "paid5_dm_survey_complete", "missing_value": missing_value_generic, @@ -715,7 +868,7 @@ }, { "name": "Diabetes Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "diabetes_survey_complete"], "value": "record_id", @@ -729,6 +882,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Diabetes Survey", "field": "diabetes_survey_complete", "missing_value": missing_value_generic, @@ -750,7 +910,7 @@ }, { "name": "Dietary Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "dietary_survey_complete"], "value": "record_id", @@ -764,6 +924,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + 
"name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Dietary Survey", "field": "dietary_survey_complete", "missing_value": missing_value_generic, @@ -785,7 +952,7 @@ }, { "name": "Opthalmic Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "ophthalmic_survey_complete"], "value": "record_id", @@ -799,6 +966,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Opthalmic Survey", "field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, @@ -820,7 +994,7 @@ }, { "name": "PhenX SDOH Combined Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "px_sdoh_combined_survey_complete"], "value": "record_id", @@ -834,6 +1008,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "PhenX SDOH Combined Survey", "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, @@ -855,7 +1036,7 @@ }, { "name": "PhenX Food Insecurity Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "px_food_insecurity_survey_complete"], "value": "record_id", @@ -869,6 +1050,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "PhenX Food Insecurity Survey", "field": "px_food_insecurity_survey_complete", "missing_value": missing_value_generic, @@ -890,7 
+1078,7 @@ }, { "name": "PhenX Neighborhood Environment Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "px_neighborhood_environment_survey_complete"], "value": "record_id", @@ -904,6 +1092,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "PhenX Neighborhood Environment Survey", "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, @@ -925,7 +1120,7 @@ }, { "name": "PhenX Racial and Ethnic Discrimination Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": [ "siteid", @@ -942,6 +1137,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "PhenX Racial and Ethnic Discrimination Survey", "field": "px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, @@ -963,7 +1165,7 @@ }, { "name": "Decline Participation Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "decline_participation_survey_complete"], "value": "record_id", @@ -977,6 +1179,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Decline Participation Survey", "field": "decline_participation_survey_complete", "missing_value": missing_value_generic, @@ -998,7 +1207,7 @@ }, { "name": "Study Enrollment Survey", - "vtype": "SingleCategorical", + "vtype": 
"DoubleCategorical", "method": { "groups": ["siteid", "study_enrollment_complete"], "value": "record_id", @@ -1012,6 +1221,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Study Enrollment Survey", "field": "study_enrollment_complete", "missing_value": missing_value_generic, @@ -1033,7 +1249,7 @@ }, { "name": "Driving Record", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "driving_record_complete"], "value": "record_id", @@ -1047,6 +1263,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Driving Record", "field": "driving_record_complete", "missing_value": missing_value_generic, @@ -1068,7 +1291,7 @@ }, { "name": "Device Distribution", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "device_distribution_complete"], "value": "record_id", @@ -1082,6 +1305,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Device Distribution", "field": "device_distribution_complete", "missing_value": missing_value_generic, @@ -1103,7 +1333,7 @@ }, { "name": "Medications Assessment", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "meds_assessment_complete"], "value": "record_id", @@ -1117,6 +1347,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": 
{ "name": "Medications Assessment", "field": "meds_assessment_complete", "missing_value": missing_value_generic, @@ -1138,7 +1375,7 @@ }, { "name": "Physical Assessment", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "physical_assessment_complete"], "value": "record_id", @@ -1152,6 +1389,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Physical Assessment", "field": "physical_assessment_complete", "missing_value": missing_value_generic, @@ -1173,7 +1417,7 @@ }, { "name": "BCVA", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "bcva_complete"], "value": "record_id", @@ -1187,6 +1431,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "BCVA", "field": "bcva_complete", "missing_value": missing_value_generic, @@ -1208,7 +1459,7 @@ }, { "name": "Photopic MARS", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "photopic_mars_complete"], "value": "record_id", @@ -1222,6 +1473,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Photopic MARS", "field": "photopic_mars_complete", "missing_value": missing_value_generic, @@ -1243,7 +1501,7 @@ }, { "name": "Mesopic MARS", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "mesopic_mars_complete"], "value": "record_id", @@ -1257,6 +1515,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Mesopic MARS", + "field": 
"mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Mesopic MARS", "field": "mesopic_mars_complete", "missing_value": missing_value_generic, @@ -1278,7 +1543,7 @@ }, { "name": "Monofilament", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "monofilament_complete"], "value": "record_id", @@ -1292,6 +1557,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Monofilament", "field": "monofilament_complete", "missing_value": missing_value_generic, @@ -1313,7 +1585,7 @@ }, { "name": "MOCA", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "moca_complete"], "value": "record_id", @@ -1327,6 +1599,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "MOCA", + "field": "moca_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "MOCA", "field": "moca_complete", "missing_value": missing_value_generic, @@ -1348,7 +1627,7 @@ }, { "name": "ECG Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "ecg_complete"], "value": "record_id", @@ -1362,6 +1641,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "ECG Survey", "field": "ecg_complete", "missing_value": missing_value_generic, @@ -1383,7 +1669,7 @@ }, { "name": "Lab Results Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "lab_results_complete"], "value": "record_id", @@ -1397,6 +1683,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Lab Results Survey", 
+ "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Lab Results Survey", "field": "lab_results_complete", "missing_value": missing_value_generic, @@ -1418,7 +1711,7 @@ }, { "name": "Specimen Management", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "specimen_management_complete"], "value": "record_id", @@ -1432,6 +1725,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Specimen Management", "field": "specimen_management_complete", "missing_value": missing_value_generic, @@ -1453,7 +1753,7 @@ }, { "name": "Device Return", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "device_return_complete"], "value": "record_id", @@ -1467,6 +1767,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Device Return", "field": "device_return_complete", "missing_value": missing_value_generic, @@ -1488,7 +1795,7 @@ }, { "name": "Disposition Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", "disposition_complete"], "value": "record_id", @@ -1502,6 +1809,13 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], + "name": "Disposition Survey", + "field": "disposition_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { "name": "Disposition Survey", "field": "disposition_complete", "missing_value": missing_value_generic, @@ -1523,7 +1837,7 @@ }, { "name": "Data Management Survey", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "method": { "groups": ["siteid", 
"data_management_complete"], "value": "record_id", @@ -1537,141 +1851,21 @@ "astype": str, }, "group": { + "remap": lambda x: x["name"], "name": "Data Management Survey", "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, - "color": { + "subgroup": { "name": "Data Management Survey", "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - ], - }, -) - -# Sex & Gender Counts by Site -mixedTransformTestConfig = ( - "mixedTransform", - { - "key": "mixed-transform-test", - "strict": True, - "transforms": [ - { - "name": "Sex & Gender", - "vtype": "DoubleCategorical", - "method": { - "groups": ["siteid", "scrsex", "genderid"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Gender", - "field": "genderid", - "missing_value": missing_value_generic, - "astype": str, - }, "color": { - "name": "Gender", - "field": "genderid", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Race & Ethnicity", - "vtype": "DoubleCategorical", - "method": { - "groups": ["siteid", "race", "ethnic"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Ethnicity", - "field": "ethnic", - 
"missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Ethnicity", - "field": "ethnic", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Type II Diabetes", - "vtype": "SingleCategorical", - "method": { - "groups": ["siteid", "mhterm_dm2"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Phenotype", - "field": "mhterm_dm2", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Phenotype", - "field": "mhterm_dm2", + "name": "Data Management Survey", + "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1686,3 +1880,10 @@ ], }, ) +transformConfigs = { + "redcap": redcapTransformConfig, + "overview": overviewTransformConfig, + "phenotypes": phenotypesTransformConfig, + "devices": phenotypesTransformConfig, + "recruitment": phenotypesTransformConfig, +} diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index 24f0a4db..ff59b1bb 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -10,7 +10,7 @@ class ModuleTransform(object): def __init__( - self: object, + self, config: Dict[str, Dict[str, Any]], logging_config: Dict[str, str] = {}, ) -> None: @@ -76,7 +76,7 @@ def __init__( return - def _transformIsValid(self: object, transform: Tuple[int, Dict[str, Any]]) -> bool: + def _transformIsValid(self, transform: Tuple[int, Dict[str, Any]]) -> bool: """ Transform validator """ @@ -105,11 +105,12 @@ def _transformIsValid(self: object, transform: Tuple[int, Dict[str, Any]]) -> bo return valid def _setValueType( - self: object, + self, vtype: Any, + 
name: str, record: Dict[str, Any], key: str, - accessor: Dict[str, Dict[str, str | Callable]], + accessors: List[Dict[str, Dict[str, str | Callable]]], ) -> Any: """ Element-wise type setting method. If value of @@ -117,6 +118,7 @@ def _setValueType( value as the type defined for property in the vtype. """ + accessor = accessors[key] for pname, _ptype in vtype.props: if pname == key: # Accessor Typing @@ -131,6 +133,14 @@ def _setValueType( ) # Accessor Name pvalue = record[accessor["field"]] + if "remap" in accessor and accessor["remap"] is not None: + pvalue = accessor["remap"]({ + "name": name, + "record": record, + "value": pvalue, + "key": key, + "accessors": accessors, + }) if pvalue != accessor["missing_value"]: try: pvalue = ptype(pvalue) @@ -148,7 +158,7 @@ def _setValueType( return pvalue - def simpleTransform(self: object, df: pd.DataFrame) -> object: + def simpleTransform(self, df: pd.DataFrame) -> object: """ Performs a pd.DataFrame.groupby transform. The df is first subset to the relevant fields. A @@ -179,7 +189,7 @@ def simpleTransform(self: object, df: pd.DataFrame) -> object: for record in transformed.to_dict("records"): record = { - key: self._setValueType(vtype, record, key, accessor) + key: self._setValueType(vtype, name, record, key, accessor) for key, accessor in accessors.items() } record = {"name": name} | record @@ -194,7 +204,7 @@ def simpleTransform(self: object, df: pd.DataFrame) -> object: return self - def compoundTransform(self: object, df: pd.DataFrame) -> object: + def compoundTransform(self, df: pd.DataFrame) -> object: """ For each transform, performs a pd.DataFrame.groupby transform. 
The df is first subset to the relevant @@ -214,7 +224,7 @@ def compoundTransform(self: object, df: pd.DataFrame) -> object: transform["name"], getattr(vtypes, transform["vtype"])(), transform["method"], - transform["accessors"], + transform["accessors"] ) if vtype.isvalid(df, accessors): temp = df[ @@ -225,8 +235,9 @@ def compoundTransform(self: object, df: pd.DataFrame) -> object: transformed = getattr(grouped, func)() for record in transformed.to_dict("records"): + print(name, record, accessors, "\n") record = { - key: self._setValueType(vtype, record, key, accessor) + key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() } record = {"name": name} | record @@ -241,7 +252,7 @@ def compoundTransform(self: object, df: pd.DataFrame) -> object: return self - def mixedTransform(self: object, df: pd.DataFrame) -> object: + def mixedTransform(self, df: pd.DataFrame) -> object: """ For each transform, performs a pd.DataFrame.groupby transform. The df is first subset to the relevant @@ -261,7 +272,7 @@ def mixedTransform(self: object, df: pd.DataFrame) -> object: transform["name"], getattr(vtypes, transform["vtype"])(), transform["method"], - transform["accessors"], + transform["accessors"] ) if vtype.isvalid(df, accessors): temp = df[ @@ -274,7 +285,7 @@ def mixedTransform(self: object, df: pd.DataFrame) -> object: subtransform = [] for record in transformed.to_dict("records"): record = { - key: self._setValueType(vtype, record, key, accessor) + key: self._setValueType(vtype, name, record, key, accessor) for key, accessor in accessors.items() } record = {"name": name} | record diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 6e6feeaf..93cf34d5 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -181,27 +181,27 @@ def __init__(self, config: dict) -> None: # Getters # - def get_report_id(self: object, report_name: 
str) -> str: + def get_report_id(self, report_name: str) -> str: """ Returns a str instance of the REDCap report ID. """ return self.reports[report_name]["id"] def get_report_pycap( - self: object, report_name: str + self, report_name: str ) -> Union[List[Dict[str, Any]], str, pd.DataFrame]: """ Returns a PyCap Report object containing the report. """ return self.reports[report_name]["report"] - def get_report_df(self: object, report_name: str) -> pd.DataFrame: + def get_report_df(self, report_name: str) -> pd.DataFrame: """ Returns a pd.DataFrame instance containing the report. """ return self.reports[report_name]["df"] - def get_report_transformed_df(self: object, report_name: str) -> pd.DataFrame: + def get_report_transformed_df(self, report_name: str) -> pd.DataFrame: """ Returns a pd.DataFrame instance containing the report with normalization transforms applied. @@ -209,7 +209,7 @@ def get_report_transformed_df(self: object, report_name: str) -> pd.DataFrame: return self.reports[report_name]["transformed"] def get_report_transforms( - self: object, report_name: str + self, report_name: str ) -> List[Tuple[str, Dict[str, Any]]]: """ Returns a list of transforms that will be applied to @@ -217,7 +217,7 @@ def get_report_transforms( """ return self.reports[report_name]["transforms"] - def get_report_annotations(self: object, report_name: str) -> List[Dict[str, Any]]: + def get_report_annotations(self, report_name: str) -> List[Dict[str, Any]]: """ Returns a list of annotations generated from the REDCap metadata API call. @@ -229,7 +229,7 @@ def get_report_annotations(self: object, report_name: str) -> List[Dict[str, Any # # Applies Declared Transforms to Reports - def _apply_report_transforms(self: object, report_name: str) -> None: + def _apply_report_transforms(self, report_name: str) -> None: """ Interal method that applies the transforms to each report as an idempotent transform stack. 
@@ -247,7 +247,7 @@ def _apply_report_transforms(self: object, report_name: str) -> None: return self def apply_transform( - self: object, + self, df: pd.DataFrame, transform_name: str, transform_kwdargs: Dict[str, Any] = {}, @@ -263,7 +263,7 @@ def apply_transform( # def _drop_columns( - self: object, + self, df: pd.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], @@ -274,7 +274,7 @@ def _drop_columns( @classmethod def drop_columns( - self: object, df: pd.DataFrame, columns: List[str] + self, df: pd.DataFrame, columns: List[str] ) -> pd.DataFrame: """ Drop columns from pd.DataFrame. @@ -286,7 +286,7 @@ def drop_columns( # def _keep_columns( - self: object, + self, df: pd.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], @@ -299,7 +299,7 @@ def _keep_columns( @classmethod def keep_columns( - self: object, df: pd.DataFrame, columns: List[str] + self, df: pd.DataFrame, columns: List[str] ) -> pd.DataFrame: """ Keep only selected columns in pd.DataFrame. 
@@ -311,7 +311,7 @@ def keep_columns( # def _append_column_suffix( - self: object, + self, df: pd.DataFrame, columns: List[str] = [], suffix: str = "", @@ -326,7 +326,7 @@ def _append_column_suffix( @classmethod def append_column_suffix( - self: object, + self, df: pd.DataFrame, columns: List[str] = [], suffix: str = "", @@ -349,7 +349,7 @@ def append_column_suffix( # def _prepend_column_prefix( - self: object, + self, df: pd.DataFrame, columns: List[str] = [], prefix: str = "", @@ -364,7 +364,7 @@ def _prepend_column_prefix( @classmethod def prepend_column_prefix( - self: object, + self, df: pd.DataFrame, columns: List[str] = [], prefix: str = "", @@ -387,7 +387,7 @@ def prepend_column_prefix( # def _remap_values_by_columns( - self: object, + self, df: pd.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, @@ -427,7 +427,7 @@ def _remap_values_by_columns( @classmethod def remap_values_by_columns( - self: object, + self, df: pd.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, @@ -463,7 +463,7 @@ def remap_values_by_columns( # def _map_missing_values_by_columns( - self: object, + self, df: pd.DataFrame, columns: List[str], missing_value: Any = None, @@ -484,7 +484,7 @@ def _map_missing_values_by_columns( @classmethod def map_missing_values_by_columns( - self: object, df: pd.DataFrame, columns: List[str], missing_value: Any + self, df: pd.DataFrame, columns: List[str], missing_value: Any ) -> pd.DataFrame: """ Replace 0-length values or values with keys in @@ -503,7 +503,7 @@ def map_missing_values_by_columns( # def _drop_rows( - self: object, + self, df: pd.DataFrame, columns: List[str] = [], condition: Callable = lambda column: column == "", @@ -515,7 +515,7 @@ def _drop_rows( @classmethod def drop_rows( - self: object, + self, df: pd.DataFrame, columns: List[str], condition: Callable = lambda column: column == "", @@ -534,7 +534,7 @@ def drop_rows( # def _aggregate_repeat_instrument_column_by_index( - self: object, + self, df: 
pd.DataFrame, aggregator: Callable = "max", dtype: Callable = float, @@ -557,7 +557,7 @@ def _aggregate_repeat_instrument_column_by_index( @classmethod def aggregate_repeat_instrument_by_index( - self: object, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float + self, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float ) -> pd.DataFrame: """ Pre-processing REDCap repeat_instrument so each instrument @@ -574,7 +574,7 @@ def aggregate_repeat_instrument_by_index( # def _merge_reports( - self: object, + self, receiving_report_name: str, merge_steps: List[Tuple[str, Dict[str, Any]]], ) -> pd.DataFrame: @@ -603,7 +603,7 @@ def _merge_reports( # Transform Prelude - Get Applicable Transform Columns def _resolve_columns_with_dataframe( - self: object, df: pd.DataFrame, columns: List[str] + self, df: pd.DataFrame, columns: List[str] ) -> List[str]: """ Internal utility function. Uses set logic to ensure @@ -635,7 +635,7 @@ def _resolve_columns_with_dataframe( # Extract REDCap Type Metadata def _get_redcap_type_metadata( - self: object, df: pd.DataFrame + self, df: pd.DataFrame ) -> List[Dict[str, Any]]: """ Extracts REDCap field name, type, and options (the @@ -713,7 +713,7 @@ def _get_redcap_type_metadata( # Export Untransformed (Raw) Reports def export_raw( - self: object, path: str = "", separator: str = "\t", filetype: str = ".tsv" + self, path: str = "", separator: str = "\t", filetype: str = ".tsv" ) -> object: for report_name, report_object in self.reports.items(): filename = f"{report_name}_raw{filetype}" @@ -729,7 +729,7 @@ def export_raw( # Export Transformed Reports def export_transformed( - self: object, path: str = "", separator: str = "\t", filetype: str = ".tsv" + self, path: str = "", separator: str = "\t", filetype: str = ".tsv" ) -> object: for report_name, report_object in self.reports.items(): filename = f"{report_name}_transformed{filetype}" @@ -745,7 +745,7 @@ def export_transformed( # Export Merged Transforms def 
export_merged_transformed( - self: object, path: str = "", separator: str = "\t", filetype: str = ".tsv" + self, path: str = "", separator: str = "\t", filetype: str = ".tsv" ) -> object: filename = f"transformed-merged_redcap-extract{filetype}" filepath = os.path.join(path, filename) diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index 7bb4b3fa..939db07b 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -7,7 +7,7 @@ class SingleTimeseries(VType): def __init__(self: object) -> None: super().__init__( "SingleTimeseries", - [("filterby", str), ("subgroup", str), ("color", str), ("x", datetime)], + [("filterby", str), ("subgroup", str), ("color", str), ("datetime", datetime)], pd._libs.tslibs.nattype.NaTType, ) @@ -20,7 +20,7 @@ def __init__(self: object) -> None: ("filterby", str), ("subgroup", str), ("color", str), - ("x", datetime), + ("datetime", str), ("y", int), ], pd._libs.tslibs.nattype.NaTType, @@ -35,7 +35,7 @@ def __init__(self: object) -> None: ("filterby", str), ("subgroup", str), ("color", str), - ("x", datetime), + ("datetime", str), ("y", float), ], pd._libs.tslibs.nattype.NaTType, diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index e7775ed0..2e3a1a9e 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -13,13 +13,14 @@ def __init__( self.name = name self.props = props self.missing_value = missing_value - self.validation_errors = [] + # References + self.validation_errors: List[str] = [] def __str__(self): return f"{self.__dict__}" def isvalid( - self: object, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]] + self, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]] ) -> bool: columns = df.columns for pname, ptype in self.props: diff --git a/notebooks/azure-blob.ipynb b/notebooks/azure-blob.ipynb index 9925f1fd..a05962eb 100644 --- a/notebooks/azure-blob.ipynb +++ b/notebooks/azure-blob.ipynb @@ -1,161 +1,161 @@ { - 
"cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from dotenv import dotenv_values\n", - "import datetime\n", - "import requests\n", - "import base64\n", - "import hmac\n", - "import hashlib\n", - "from pprint import pprint\n", - "\n", - "# Load environment variables from .env\n", - "config = dotenv_values(\".env\")\n", - "\n", - "if not config.get(\"AZURE_STORAGE_ACCOUNT_NAME\"):\n", - " raise ValueError(\"AZURE_STORAGE_ACCOUNT_NAME is not set\")\n", - "\n", - "AZURE_STORAGE_ACCOUNT_NAME = config.get(\"AZURE_STORAGE_ACCOUNT_NAME\")\n", - "\n", - "if not config.get(\"AZURE_ACCESS_KEY\"):\n", - " raise ValueError(\"AZURE_ACCESS_KEY is not set\")\n", - "\n", - "AZURE_ACCESS_KEY = config.get(\"AZURE_ACCESS_KEY\")\n", - "\n", - "if not config.get(\"AZURE_SAS_TOKEN\"):\n", - " raise ValueError(\"AZURE_SAS_TOKEN is not set\")\n", - "\n", - "AZURE_SAS_TOKEN = config.get(\"AZURE_SAS_TOKEN\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# connection via SAS token\n", - "\n", - "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", - "storage_account_sas_token = AZURE_SAS_TOKEN\n", - "api_version = \"2023-08-03\"\n", - "request_time = datetime.datetime.utcnow().strftime(\"%a, %d %b %Y %H:%M:%S GMT\")\n", - "\n", - "containerName = \"ucsd-pilot\"\n", - "\n", - "url = f\"https://{storage_account_name}.dfs.core.windows.net/{containerName}?directory=Cirrus/4001&recursive=false&resource=filesystem&{storage_account_sas_token}\"\n", - "# url = f\"https://{storage_account_name}.dfs.core.windows.net/{folderName}?recursive=false&resource=filesystem&prefix=CGM%2F&delimiter=%2F&{storage_account_sas_token}\"\n", - "\n", - "headers = {\n", - " \"x-ms-date\": request_time,\n", - " \"x-ms-version\": api_version,\n", - "}\n", - "\n", - "response = requests.get(url, headers=headers)\n", - "# print(response.text)\n", - "pprint(response.json())" - ] - }, - { 
- "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# connection via shared key\n", - "\n", - "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", - "storage_account_key = AZURE_ACCESS_KEY\n", - "api_version = '2023-08-03'\n", - "request_time = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')\n", - "\n", - "\n", - "string_params = {\n", - " 'Verb': 'GET',\n", - " 'Content-Encoding': '',\n", - " 'Content-Language': '',\n", - " 'Content-Length': '',\n", - " 'Content-MD5': '',\n", - " 'Content-Type': '',\n", - " 'Date': '',\n", - " 'If-Modified-Since': '',\n", - " 'If-Match': '',\n", - " 'If-None-Match': '',\n", - " 'If-Unmodified-Since': '',\n", - " 'Range': '',\n", - " 'CanonicalizedHeaders': 'x-ms-date:' + request_time + '\\nx-ms-version:' + api_version + '\\n',\n", - " 'CanonicalizedResource': '/' + storage_account_name + '/\\ncomp:properties\\nrestype:service'\n", - "}\n", - "\n", - "string_to_sign = (string_params['Verb'] + '\\n'\n", - " + string_params['Content-Encoding'] + '\\n'\n", - " + string_params['Content-Language'] + '\\n'\n", - " + string_params['Content-Length'] + '\\n'\n", - " + string_params['Content-MD5'] + '\\n'\n", - " + string_params['Content-Type'] + '\\n'\n", - " + string_params['Date'] + '\\n'\n", - " + string_params['If-Modified-Since'] + '\\n'\n", - " + string_params['If-Match'] + '\\n'\n", - " + string_params['If-None-Match'] + '\\n'\n", - " + string_params['If-Unmodified-Since'] + '\\n'\n", - " + string_params['Range'] + '\\n'\n", - " + string_params['CanonicalizedHeaders']\n", - " + string_params['CanonicalizedResource'])\n", - "\n", - "def _sign_string(key, string_to_sign):\n", - " key = base64.b64decode(key.encode('utf-8'))\n", - " string_to_sign = string_to_sign.encode('utf-8')\n", - " signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)\n", - " digest = signed_hmac_sha256.digest()\n", - " encoded_digest = base64.b64encode(digest).decode('utf-8')\n", - 
" return encoded_digest\n", - "\n", - "# signed_string = base64.b64encode(hmac.new(base64.b64decode(storage_account_key), msg=string_to_sign.encode('utf-8'), digestmod=hashlib.sha256).digest()).decode('utf-8')\n", - "# print(signed_string)\n", - "\n", - "signed_string = _sign_string(storage_account_key, string_to_sign=string_to_sign)\n", - "\n", - "headers = {\n", - " 'x-ms-date' : request_time,\n", - " 'x-ms-version' : api_version,\n", - " # 'Content-Length': \"\",\n", - " 'Authorization' : f\"SharedKey {storage_account_name}:{signed_string}\"\n", - "}\n", - "\n", - "dns_suffix = 'dfs.core.windows.net'\n", - "folderName = 'logging'\n", - "\n", - "url = f'https://{storage_account_name}.{dns_suffix}/{folderName}?resource=filesystem'\n", - "\n", - "response = requests.get(url, headers=headers)\n", - "pprint(response.json())" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "fairhub-api-dev-env", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.18" - } + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from dotenv import dotenv_values\n", + "import datetime\n", + "import requests\n", + "import base64\n", + "import hmac\n", + "import hashlib\n", + "from pprint import pprint\n", + "\n", + "# Load environment variables from .env\n", + "config = dotenv_values(\".env\")\n", + "\n", + "if not config.get(\"AZURE_STORAGE_ACCOUNT_NAME\"):\n", + " raise ValueError(\"AZURE_STORAGE_ACCOUNT_NAME is not set\")\n", + "\n", + "AZURE_STORAGE_ACCOUNT_NAME = config.get(\"AZURE_STORAGE_ACCOUNT_NAME\")\n", + "\n", + "if not config.get(\"AZURE_ACCESS_KEY\"):\n", + " raise ValueError(\"AZURE_ACCESS_KEY is not set\")\n", + "\n", + "AZURE_ACCESS_KEY = 
config.get(\"AZURE_ACCESS_KEY\")\n", + "\n", + "if not config.get(\"AZURE_SAS_TOKEN\"):\n", + " raise ValueError(\"AZURE_SAS_TOKEN is not set\")\n", + "\n", + "AZURE_SAS_TOKEN = config.get(\"AZURE_SAS_TOKEN\")" + ] }, - "nbformat": 4, - "nbformat_minor": 2 + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# connection via SAS token\n", + "\n", + "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", + "storage_account_sas_token = AZURE_SAS_TOKEN\n", + "api_version = \"2023-08-03\"\n", + "request_time = datetime.datetime.utcnow().strftime(\"%a, %d %b %Y %H:%M:%S GMT\")\n", + "\n", + "containerName = \"ucsd-pilot\"\n", + "\n", + "url = f\"https://{storage_account_name}.dfs.core.windows.net/{containerName}?directory=Cirrus/4001&recursive=false&resource=filesystem&{storage_account_sas_token}\"\n", + "# url = f\"https://{storage_account_name}.dfs.core.windows.net/{folderName}?recursive=false&resource=filesystem&prefix=CGM%2F&delimiter=%2F&{storage_account_sas_token}\"\n", + "\n", + "headers = {\n", + " \"x-ms-date\": request_time,\n", + " \"x-ms-version\": api_version,\n", + "}\n", + "\n", + "response = requests.get(url, headers=headers)\n", + "# print(response.text)\n", + "pprint(response.json())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# connection via shared key\n", + "\n", + "storage_account_name = AZURE_STORAGE_ACCOUNT_NAME\n", + "storage_account_key = AZURE_ACCESS_KEY\n", + "api_version = '2023-08-03'\n", + "request_time = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')\n", + "\n", + "\n", + "string_params = {\n", + " 'Verb': 'GET',\n", + " 'Content-Encoding': '',\n", + " 'Content-Language': '',\n", + " 'Content-Length': '',\n", + " 'Content-MD5': '',\n", + " 'Content-Type': '',\n", + " 'Date': '',\n", + " 'If-Modified-Since': '',\n", + " 'If-Match': '',\n", + " 'If-None-Match': '',\n", + " 'If-Unmodified-Since': 
'',\n", + " 'Range': '',\n", + " 'CanonicalizedHeaders': 'x-ms-date:' + request_time + '\\nx-ms-version:' + api_version + '\\n',\n", + " 'CanonicalizedResource': '/' + storage_account_name + '/\\ncomp:properties\\nrestype:service'\n", + "}\n", + "\n", + "string_to_sign = (string_params['Verb'] + '\\n'\n", + " + string_params['Content-Encoding'] + '\\n'\n", + " + string_params['Content-Language'] + '\\n'\n", + " + string_params['Content-Length'] + '\\n'\n", + " + string_params['Content-MD5'] + '\\n'\n", + " + string_params['Content-Type'] + '\\n'\n", + " + string_params['Date'] + '\\n'\n", + " + string_params['If-Modified-Since'] + '\\n'\n", + " + string_params['If-Match'] + '\\n'\n", + " + string_params['If-None-Match'] + '\\n'\n", + " + string_params['If-Unmodified-Since'] + '\\n'\n", + " + string_params['Range'] + '\\n'\n", + " + string_params['CanonicalizedHeaders']\n", + " + string_params['CanonicalizedResource'])\n", + "\n", + "def _sign_string(key, string_to_sign):\n", + " key = base64.b64decode(key.encode('utf-8'))\n", + " string_to_sign = string_to_sign.encode('utf-8')\n", + " signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256)\n", + " digest = signed_hmac_sha256.digest()\n", + " encoded_digest = base64.b64encode(digest).decode('utf-8')\n", + " return encoded_digest\n", + "\n", + "# signed_string = base64.b64encode(hmac.new(base64.b64decode(storage_account_key), msg=string_to_sign.encode('utf-8'), digestmod=hashlib.sha256).digest()).decode('utf-8')\n", + "# print(signed_string)\n", + "\n", + "signed_string = _sign_string(storage_account_key, string_to_sign=string_to_sign)\n", + "\n", + "headers = {\n", + " 'x-ms-date' : request_time,\n", + " 'x-ms-version' : api_version,\n", + " # 'Content-Length': \"\",\n", + " 'Authorization' : f\"SharedKey {storage_account_name}:{signed_string}\"\n", + "}\n", + "\n", + "dns_suffix = 'dfs.core.windows.net'\n", + "folderName = 'logging'\n", + "\n", + "url = 
f'https://{storage_account_name}.{dns_suffix}/{folderName}?resource=filesystem'\n", + "\n", + "response = requests.get(url, headers=headers)\n", + "pprint(response.json())" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "fairhub-api-dev-env", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 } diff --git a/poetry.lock b/poetry.lock index 7e99f546..438a3186 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1789,16 +1789,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2441,8 +2431,6 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = 
"psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -3355,6 +3343,24 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "sqlalchemy-json" +version = "0.7.0" +description = "JSON type with nested change tracking for SQLAlchemy" +category = "main" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "sqlalchemy-json-0.7.0.tar.gz", hash = "sha256:620d0b26f648f21a8fa9127df66f55f83a5ab4ae010e5397a5c6989a08238561"}, + {file = "sqlalchemy_json-0.7.0-py3-none-any.whl", hash = "sha256:27881d662ca18363a4ac28175cc47ea2a6f2bef997ae1159c151026b741818e6"}, +] + +[package.dependencies] +sqlalchemy = ">=0.7" + +[package.extras] +dev = ["pytest"] + [[package]] name = "stack-data" version = "0.6.3" @@ -3819,4 +3825,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.10.12,<3.13" -content-hash = "93ee3c0f5e7b66ce63f0b9a6c6d883aa8d4be5729f76b89b61f16dd9525bdf58" +content-hash = "dcb084e9020606373596bffd6d8275f29b98e0d37c7a0ba99882b52c9756840a" diff --git a/pyproject.toml b/pyproject.toml index 231652a3..f1a0e6ec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ pycap = "^2.4.0" redis = "^5.0.1" numpy = "^1.26.1" pandas = "^2.1.1" +sqlalchemy-json = "^0.7.0" [tool.poetry.group.dev.dependencies] From 7bd08a1d160ebc37cc00e904e3ffd5b3bc968bed Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 13:03:55 -0800 Subject: [PATCH 359/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20deploy=20api=20to?= =?UTF-8?q?=20Azure=20Container=20Registry=20(#25)?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👷 ci: deoploy api to ACR * 👷 ci: deploy api to ACR * 👷 ci: update build conditions * 👷 ci: remove test branch --- .github/workflows/build-staging.yml | 41 +++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .github/workflows/build-staging.yml diff --git a/.github/workflows/build-staging.yml b/.github/workflows/build-staging.yml new file mode 100644 index 00000000..7a6b9984 --- /dev/null +++ b/.github/workflows/build-staging.yml @@ -0,0 +1,41 @@ +name: Build and push Docker image to Azure Container Registry (staging) + +on: + push: + branches: + - staging + pull_request: + types: [closed] + branches: + - staging + workflow_dispatch: + +jobs: + build: + name: Build and push Docker image + if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.merged == true) + runs-on: ubuntu-latest + env: + FAIRHUB_DATABASE_URL: ${{ secrets.FAIRHUB_STAGING_DATABASE_URL }} + AZURE_REGISTRY_LOGIN_SERVER: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} + AZURE_REGISTRY_USERNAME: ${{ secrets.AZURE_REGISTRY_USERNAME }} + AZURE_REGISTRY_PASSWORD: ${{ secrets.AZURE_REGISTRY_PASSWORD }} + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Login to Azure Container Registry + uses: azure/docker-login@v1 + with: + login-server: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} + username: ${{ secrets.AZURE_REGISTRY_USERNAME }} + password: ${{ secrets.AZURE_REGISTRY_PASSWORD }} + + - name: Build and push Docker image + uses: docker/build-push-action@v2 + with: + context: . 
+ file: ./Dockerfile + push: true + tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging \ No newline at end of file From 27e5ee2cdcbca38ce52798d31a94ea1264857111 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 13:18:21 -0800 Subject: [PATCH 360/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20add=20sha=20tag=20?= =?UTF-8?q?to=20build=20image?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-staging.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-staging.yml b/.github/workflows/build-staging.yml index 7a6b9984..1817f468 100644 --- a/.github/workflows/build-staging.yml +++ b/.github/workflows/build-staging.yml @@ -32,10 +32,14 @@ jobs: username: ${{ secrets.AZURE_REGISTRY_USERNAME }} password: ${{ secrets.AZURE_REGISTRY_PASSWORD }} + - name: Get GitHub short SHA + id: git_sha + run: echo ::set-output name=sha::$(git rev-parse --short ${{ github.sha }}) + - name: Build and push Docker image uses: docker/build-push-action@v2 with: context: . 
file: ./Dockerfile push: true - tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging \ No newline at end of file + tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:{${{ steps.git_sha.outputs.sha }} \ No newline at end of file From 20814d48e50330bad0009562938a405ca9c33d28 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 13:19:40 -0800 Subject: [PATCH 361/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20remove=20typo?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-staging.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-staging.yml b/.github/workflows/build-staging.yml index 1817f468..d82ad01e 100644 --- a/.github/workflows/build-staging.yml +++ b/.github/workflows/build-staging.yml @@ -42,4 +42,4 @@ jobs: context: . file: ./Dockerfile push: true - tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:{${{ steps.git_sha.outputs.sha }} \ No newline at end of file + tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:${{ steps.git_sha.outputs.sha }} \ No newline at end of file From 17da83bca8b1262f44a2bf8f888b50c62a1bb8be Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 16:10:13 -0800 Subject: [PATCH 362/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20wait=20for=20other?= =?UTF-8?q?=20workflows=20to=20complete=20(#26)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 👷 ci: wait for other workflows to complete * 🔨 chore: add a sample env --- .env.example | 5 ++ ...aging.yml => build-and-deploy-to-main.yml} | 16 ++--- .../workflows/build-and-deploy-to-staging.yml | 68 +++++++++++++++++++ .../workflows/deploy-app-to-staging-slot.yml | 67 ------------------ README.md | 62 ++++++----------- 
pyproject.toml | 2 +- 6 files changed, 100 insertions(+), 120 deletions(-) create mode 100644 .env.example rename .github/workflows/{build-staging.yml => build-and-deploy-to-main.yml} (80%) create mode 100644 .github/workflows/build-and-deploy-to-staging.yml delete mode 100644 .github/workflows/deploy-app-to-staging-slot.yml diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..67ab406e --- /dev/null +++ b/.env.example @@ -0,0 +1,5 @@ +FAIRHUB_DATABASE_URL="postgresql://admin:root@localhost:5432/fairhub_local" +FAIRHUB_SECRET="AddAny32+CharacterCountWordHereAsYourSecret" + +FAIRHUB_AZURE_READ_SAS_TOKEN= +FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME= \ No newline at end of file diff --git a/.github/workflows/build-staging.yml b/.github/workflows/build-and-deploy-to-main.yml similarity index 80% rename from .github/workflows/build-staging.yml rename to .github/workflows/build-and-deploy-to-main.yml index d82ad01e..e542d27a 100644 --- a/.github/workflows/build-staging.yml +++ b/.github/workflows/build-and-deploy-to-main.yml @@ -1,14 +1,10 @@ -name: Build and push Docker image to Azure Container Registry (staging) +name: Build and push Docker image to Azure Container Registry (main) on: - push: - branches: - - staging - pull_request: - types: [closed] - branches: - - staging - workflow_dispatch: + pull_request: + types: [closed] + branches: + - main jobs: build: @@ -42,4 +38,4 @@ jobs: context: . 
file: ./Dockerfile push: true - tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:${{ steps.git_sha.outputs.sha }} \ No newline at end of file + tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:latest,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:${{ steps.git_sha.outputs.sha }} \ No newline at end of file diff --git a/.github/workflows/build-and-deploy-to-staging.yml b/.github/workflows/build-and-deploy-to-staging.yml new file mode 100644 index 00000000..98ea39bc --- /dev/null +++ b/.github/workflows/build-and-deploy-to-staging.yml @@ -0,0 +1,68 @@ +# Deployed to https://staging.api.fairhub.io + +name: Build and push Docker image to Azure Container Registry (staging) + +on: + push: + branches: + - staging + pull_request: + types: [closed] + branches: + - staging + workflow_dispatch: + +jobs: + build: + name: Build and push Docker image + if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.merged == true) + runs-on: ubuntu-latest + env: + FAIRHUB_DATABASE_URL: ${{ secrets.FAIRHUB_STAGING_DATABASE_URL }} + AZURE_REGISTRY_LOGIN_SERVER: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} + AZURE_REGISTRY_USERNAME: ${{ secrets.AZURE_REGISTRY_USERNAME }} + AZURE_REGISTRY_PASSWORD: ${{ secrets.AZURE_REGISTRY_PASSWORD }} + + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Wait for format check + uses: lewagon/wait-on-check-action@v1.3.1 + with: + ref: ${{github.ref}} + repo-token: ${{ secrets.GITHUB_TOKEN }} + check-name: 'Format with Black' + + - name: Wait for linting to pass + uses: lewagon/wait-on-check-action@v1.3.1 + with: + ref: ${{github.ref}} + repo-token: ${{ secrets.GITHUB_TOKEN }} + check-name: 'Lint with pylint' + + - name: Wait for tests to pass + uses: lewagon/wait-on-check-action@v1.3.1 + with: + ref: ${{github.ref}} + repo-token: ${{ secrets.GITHUB_TOKEN }} + check-name: 'Run pytest (with capture)' + + - name: Login to 
Azure Container Registry + uses: azure/docker-login@v1 + with: + login-server: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} + username: ${{ secrets.AZURE_REGISTRY_USERNAME }} + password: ${{ secrets.AZURE_REGISTRY_PASSWORD }} + + - name: Get GitHub short SHA + id: git_sha + run: echo ::set-output name=sha::$(git rev-parse --short ${{ github.sha }}) + + - name: Build and push Docker image + uses: docker/build-push-action@v2 + with: + context: . + file: ./Dockerfile + push: true + tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:${{ steps.git_sha.outputs.sha }} \ No newline at end of file diff --git a/.github/workflows/deploy-app-to-staging-slot.yml b/.github/workflows/deploy-app-to-staging-slot.yml deleted file mode 100644 index 65ba15ef..00000000 --- a/.github/workflows/deploy-app-to-staging-slot.yml +++ /dev/null @@ -1,67 +0,0 @@ -# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy -# More GitHub Actions for Azure: https://github.com/Azure/actions -# More info on Python, GitHub Actions, and Azure App Service: https://aka.ms/python-webapps-actions - -name: Build and deploy Python app to Azure Web App - api-fairhub-io - -on: - push: - branches: - - staging - pull_request: - types: [opened, synchronize, reopened, closed] - branches: - - staging - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python version - uses: actions/setup-python@v1 - with: - python-version: "3.8" - - - name: Create and start virtual environment - run: | - python -m venv venv - source venv/bin/activate - - - name: Install dependencies - run: pip install poetry==1.3.2 && poetry install - - # Optional: Add step to run tests here (PyTest, Django test suites, etc.) - - - name: Upload artifact for deployment jobs - uses: actions/upload-artifact@v2 - with: - name: python-app - path: | - . 
- !venv/ - - deploy: - runs-on: ubuntu-latest - needs: build - environment: - name: "staging" - url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} - - steps: - - name: Download artifact from build job - uses: actions/download-artifact@v2 - with: - name: python-app - path: . - - - name: "Deploy to Azure Web App" - uses: azure/webapps-deploy@v2 - id: deploy-to-webapp - with: - app-name: "api-fairhub-io" - slot-name: "staging" - publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_STAGING }} diff --git a/README.md b/README.md index 98a45277..a68b3b90 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ If you would like to update the api, please follow the instructions below. If you are using Anaconda, you can create a virtual environment with: ```bash - conda create -n fairhub-api-dev-env python=3.8 + conda create -n fairhub-api-dev-env python=3.10 conda activate fairhub-api-dev-env ``` @@ -38,54 +38,50 @@ If you would like to update the api, please follow the instructions below. You can also use version 1.2.0 of Poetry, but you will need to run `poetry lock` after installing the dependencies. -3. Add your modifications and run the tests: +3. Add your environment variables. An example is provided at `.env.example` ```bash - poetry run pytest + cp .env.example .env ``` - If you need to add new python packages, you can use Poetry to add them: + Make sure to update the values in `.env` to match your local setup. + +4. Add your modifications and run the tests: ```bash - poetry add + poetry run pytest ``` -4. Format the code: + If you need to add new python packages, you can use Poetry to add them: ```bash - poe format + poetry add ``` -5. Check the code quality: +5. Format the code: ```bash - poetry run flake8 pyfairdatatools tests + poe format ``` -6. Run the tests and check the code coverage: +6. Check the code quality: ```bash - poe test - poe test --cov=pyfairdatatools + poe typecheck + poe lint + poe flake8 ``` -7. 
Build the package: - - Update the version number in `pyproject.toml` and `pyfairdatatools/__init__.py` and then run: - - ```text - poetry build - ``` + You can also use `poe precommit` to run both formatting and linting. -8. Publish the package: +7. Run the tests and check the code coverage: ```bash - poetry publish + poe test + poe test_with_capture # if you want to see console output ``` -## Docker - -### Database +## Database The api uses a postgres database. You can run a postgres database locally using docker: @@ -101,24 +97,6 @@ docker-compose -f ./db-docker-compose.yml down -v This database will not persist data between runs. -### API - -If you would like to run the api locally, you can use docker. - -1. Build the docker image: - - ```bash - docker build --tag fairhub-flask-api:local . - ``` - - You can set the `--tag` to whatever you want. We recommend to use `fairhub-flask-api:local`. - -2. Run the docker image: - - ```bash - docker run -p 5000:5000 -e FAIRHUB_DATABASE_URL=postgres://connection-string fairhub-flask-api:local - ``` - ## License This work is licensed under diff --git a/pyproject.toml b/pyproject.toml index 60636010..e22f7d41 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,6 +67,7 @@ isort = "^5.12.0" mypy = "^1.4.1" pydocstyle = "^6.3.0" pylint = "^2.17.4" +flake8 = "^6.0.0" # Testing pytest = "^7.1" @@ -88,7 +89,6 @@ poethepoet = "^0.20.0" # Jupyter jupyter = "^1.0.0" -flake8 = "^6.0.0" # Types types-python-dateutil = "^2.8.19.14" From a7a4db025b676a1320afff8fbc946d7b255a9324 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 16:18:16 -0800 Subject: [PATCH 363/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20ref=20for?= =?UTF-8?q?=20wait=20on=20action?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-and-deploy-to-staging.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-and-deploy-to-staging.yml 
b/.github/workflows/build-and-deploy-to-staging.yml index 98ea39bc..7effd7bc 100644 --- a/.github/workflows/build-and-deploy-to-staging.yml +++ b/.github/workflows/build-and-deploy-to-staging.yml @@ -30,21 +30,21 @@ jobs: - name: Wait for format check uses: lewagon/wait-on-check-action@v1.3.1 with: - ref: ${{github.ref}} + ref: ${{github.sha}} repo-token: ${{ secrets.GITHUB_TOKEN }} check-name: 'Format with Black' - name: Wait for linting to pass uses: lewagon/wait-on-check-action@v1.3.1 with: - ref: ${{github.ref}} + ref: ${{github.sha}} repo-token: ${{ secrets.GITHUB_TOKEN }} check-name: 'Lint with pylint' - name: Wait for tests to pass uses: lewagon/wait-on-check-action@v1.3.1 with: - ref: ${{github.ref}} + ref: ${{github.sha}} repo-token: ${{ secrets.GITHUB_TOKEN }} check-name: 'Run pytest (with capture)' From 5e5d831766a7ac5544bff2197c68431465ef3d26 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 16:21:44 -0800 Subject: [PATCH 364/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20ref=20for?= =?UTF-8?q?=20wait=20on=20action?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-and-deploy-to-staging.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-and-deploy-to-staging.yml b/.github/workflows/build-and-deploy-to-staging.yml index 7effd7bc..448e7eea 100644 --- a/.github/workflows/build-and-deploy-to-staging.yml +++ b/.github/workflows/build-and-deploy-to-staging.yml @@ -30,21 +30,21 @@ jobs: - name: Wait for format check uses: lewagon/wait-on-check-action@v1.3.1 with: - ref: ${{github.sha}} + ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} check-name: 'Format with Black' - name: Wait for linting to pass uses: lewagon/wait-on-check-action@v1.3.1 with: - ref: ${{github.sha}} + ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} check-name: 'Lint with pylint' - name: Wait for tests to pass uses: 
lewagon/wait-on-check-action@v1.3.1 with: - ref: ${{github.sha}} + ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} check-name: 'Run pytest (with capture)' From f9577770fd1b339e7e0caa7216b35307bc83d726 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 16:24:38 -0800 Subject: [PATCH 365/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20workflow?= =?UTF-8?q?=20names?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-and-deploy-to-staging.yml | 6 +++--- .github/workflows/lint.yml | 2 +- .github/workflows/test.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-and-deploy-to-staging.yml b/.github/workflows/build-and-deploy-to-staging.yml index 448e7eea..8dc2e115 100644 --- a/.github/workflows/build-and-deploy-to-staging.yml +++ b/.github/workflows/build-and-deploy-to-staging.yml @@ -32,21 +32,21 @@ jobs: with: ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Format with Black' + check-name: 'Run formatter' - name: Wait for linting to pass uses: lewagon/wait-on-check-action@v1.3.1 with: ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Lint with pylint' + check-name: 'Run linters (3.10)' - name: Wait for tests to pass uses: lewagon/wait-on-check-action@v1.3.1 with: ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Run pytest (with capture)' + check-name: 'Run tests (3.10)' - name: Login to Azure Container Registry uses: azure/docker-login@v1 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 30adb21d..44975a75 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8"] + python-version: ["3.10"] steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1f9a28ad..3d0b6984 100644 --- 
a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8"] + python-version: ["3.10"] env: # These are simulated secrets for test workflow only. FAIRHUB_DATABASE_URL: postgresql://admin:root@localhost:5432/fairhub_local From 40b6de51c9b39d9b41387d9974f7b105b98cb5d1 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 16:33:56 -0800 Subject: [PATCH 366/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20update=20build=20v?= =?UTF-8?q?ariables?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../workflows/build-and-deploy-to-main.yml | 37 ++++++++++++++++--- .../workflows/build-and-deploy-to-staging.yml | 7 +++- 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-and-deploy-to-main.yml b/.github/workflows/build-and-deploy-to-main.yml index e542d27a..50ee0211 100644 --- a/.github/workflows/build-and-deploy-to-main.yml +++ b/.github/workflows/build-and-deploy-to-main.yml @@ -1,10 +1,15 @@ -name: Build and push Docker image to Azure Container Registry (main) +name: (main) Build and push api image to Azure Container Registry on: - pull_request: - types: [closed] - branches: - - main + pull_request: + types: [closed] + branches: + - main + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true jobs: build: @@ -12,7 +17,6 @@ jobs: if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.merged == true) runs-on: ubuntu-latest env: - FAIRHUB_DATABASE_URL: ${{ secrets.FAIRHUB_STAGING_DATABASE_URL }} AZURE_REGISTRY_LOGIN_SERVER: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} AZURE_REGISTRY_USERNAME: ${{ secrets.AZURE_REGISTRY_USERNAME }} AZURE_REGISTRY_PASSWORD: ${{ secrets.AZURE_REGISTRY_PASSWORD }} @@ -21,6 +25,27 @@ jobs: - name: Checkout uses: actions/checkout@v2 + - 
name: Wait for format check + uses: lewagon/wait-on-check-action@v1.3.1 + with: + ref: main + repo-token: ${{ secrets.GITHUB_TOKEN }} + check-name: 'Run formatter' + + - name: Wait for linting to pass + uses: lewagon/wait-on-check-action@v1.3.1 + with: + ref: main + repo-token: ${{ secrets.GITHUB_TOKEN }} + check-name: 'Run linters (3.10)' + + - name: Wait for tests to pass + uses: lewagon/wait-on-check-action@v1.3.1 + with: + ref: main + repo-token: ${{ secrets.GITHUB_TOKEN }} + check-name: 'Run tests (3.10)' + - name: Login to Azure Container Registry uses: azure/docker-login@v1 with: diff --git a/.github/workflows/build-and-deploy-to-staging.yml b/.github/workflows/build-and-deploy-to-staging.yml index 8dc2e115..a1a32b2f 100644 --- a/.github/workflows/build-and-deploy-to-staging.yml +++ b/.github/workflows/build-and-deploy-to-staging.yml @@ -1,6 +1,6 @@ # Deployed to https://staging.api.fairhub.io -name: Build and push Docker image to Azure Container Registry (staging) +name: (staging) Build and push api image to Azure Container Registry on: push: @@ -12,13 +12,16 @@ on: - staging workflow_dispatch: +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + jobs: build: name: Build and push Docker image if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.pull_request.merged == true) runs-on: ubuntu-latest env: - FAIRHUB_DATABASE_URL: ${{ secrets.FAIRHUB_STAGING_DATABASE_URL }} AZURE_REGISTRY_LOGIN_SERVER: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} AZURE_REGISTRY_USERNAME: ${{ secrets.AZURE_REGISTRY_USERNAME }} AZURE_REGISTRY_PASSWORD: ${{ secrets.AZURE_REGISTRY_PASSWORD }} From d5cfdac8743217a41d2ab0745f0729e8e4d093fe Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 16:35:24 -0800 Subject: [PATCH 367/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20test=20concurrency?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit --- .github/workflows/build-and-deploy-to-main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build-and-deploy-to-main.yml b/.github/workflows/build-and-deploy-to-main.yml index 50ee0211..1f15603d 100644 --- a/.github/workflows/build-and-deploy-to-main.yml +++ b/.github/workflows/build-and-deploy-to-main.yml @@ -1,3 +1,5 @@ +# Will be deployed to https://api.fairhub.io + name: (main) Build and push api image to Azure Container Registry on: From 15228c672c5a7050d5c6b87e8226c7e5de19b582 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 17:32:52 -0800 Subject: [PATCH 368/505] =?UTF-8?q?=E2=9C=A8=20feat:=20use=20waitress=20as?= =?UTF-8?q?=20a=20wsgi=20server=20(#27)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✨ feat: use waitress for server * 🚩 chore: add a azure check for schema destroy * style: 🎨 fix code style issues with Black * 🛂 fix: update cors allowed origins * style: 🎨 fix code style issues with Black --------- Co-authored-by: Lint Action --- app.py | 40 ++-- ...e.yaml => coolify-prod-docker-compose.yaml | 0 entrypoint.sh | 2 +- poetry.lock | 225 ++++++++++++++++-- pyproject.toml | 2 + 5 files changed, 233 insertions(+), 36 deletions(-) rename prod-docker-compose.yaml => coolify-prod-docker-compose.yaml (100%) diff --git a/app.py b/app.py index fe69d442..88aa7667 100644 --- a/app.py +++ b/app.py @@ -10,6 +10,7 @@ from flask_bcrypt import Bcrypt from flask_cors import CORS from sqlalchemy import MetaData +from waitress import serve import config import model @@ -41,21 +42,19 @@ def create_app(config_module=None): # csrf = CSRFProtect() # csrf.init_app(app) - app.config.from_prefixed_env("FAIRHUB") - - # print(app.config) if config.FAIRHUB_SECRET: if len(config.FAIRHUB_SECRET) < 32: raise RuntimeError("FAIRHUB_SECRET must be at least 32 characters long") else: raise RuntimeError("FAIRHUB_SECRET not set") - if "DATABASE_URL" in 
app.config: + if config.FAIRHUB_DATABASE_URL: # if "TESTING" in app_config and app_config["TESTING"]: # pass # else: # print("DATABASE_URL: ", app.config["DATABASE_URL"]) - app.config["SQLALCHEMY_DATABASE_URI"] = app.config["DATABASE_URL"] + # app.config["SQLALCHEMY_DATABASE_URI"] = app.config["DATABASE_URL"] + app.config["SQLALCHEMY_DATABASE_URI"] = config.FAIRHUB_DATABASE_URL else: # throw error raise RuntimeError("FAIRHUB_DATABASE_URL not set") @@ -64,17 +63,19 @@ def create_app(config_module=None): api.init_app(app) bcrypt.init_app(app) + cors_origins = [ + "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string + "https://staging.fairhub.io", + "https://fairhub.io", + ] + # Only allow CORS origin for localhost:3000 # and any subdomain of azurestaticapps.net/ CORS( app, resources={ "/*": { - "origins": [ - "http://localhost:3000", - "https:\/\/brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string - "https://fairhub.io", - ], + "origins": cors_origins, } }, allow_headers=[ @@ -141,11 +142,7 @@ def on_after_request(resp): if "token" not in request.cookies: return resp - token: str = ( - request.cookies.get("token") - if request.cookies.get("token") - else "" # type: ignore - ) + token: str = request.cookies.get("token") or "" # type: ignore # Determine the appropriate configuration module based on the testing context if os.environ.get("FLASK_ENV") == "testing": @@ -210,7 +207,13 @@ def validation_exception_handler(error): @app.cli.command("destroy-schema") def destroy_schema(): """Create the database schema.""" + + # if db is azure, then skip + if config.FAIRHUB_DATABASE_URL.find("azure") > -1: + return + engine = model.db.session.get_bind() + with engine.begin(): model.db.drop_all() @@ -219,8 +222,8 @@ def destroy_schema(): metadata = MetaData() metadata.reflect(bind=engine) 
table_names = [table.name for table in metadata.tables.values()] - # print(table_names) - if len(table_names) == 0: + + if not table_names: with engine.begin(): model.db.create_all() return app @@ -238,4 +241,5 @@ def destroy_schema(): flask_app = create_app() - flask_app.run(host="0.0.0.0", port=port) + # flask_app.run(host="0.0.0.0", port=port) + serve(flask_app, port=port) diff --git a/prod-docker-compose.yaml b/coolify-prod-docker-compose.yaml similarity index 100% rename from prod-docker-compose.yaml rename to coolify-prod-docker-compose.yaml diff --git a/entrypoint.sh b/entrypoint.sh index 3b8006dc..b63e8120 100644 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -2,4 +2,4 @@ alembic upgrade head -flask run --host=0.0.0.0 --port=5000 \ No newline at end of file +python app.py diff --git a/poetry.lock b/poetry.lock index b9e4bb35..59c6b735 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "alembic" version = "1.12.1" description = "A database migration tool for SQLAlchemy." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -25,6 +26,7 @@ tz = ["python-dateutil"] name = "aniso8601" version = "9.0.1" description = "A library for parsing ISO 8601 strings." 
+category = "main" optional = false python-versions = "*" files = [ @@ -39,6 +41,7 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -60,6 +63,7 @@ trio = ["trio (<0.22)"] name = "appnope" version = "0.1.3" description = "Disable App Nap on macOS >= 10.9" +category = "dev" optional = false python-versions = "*" files = [ @@ -71,6 +75,7 @@ files = [ name = "argon2-cffi" version = "21.3.0" description = "The secure Argon2 password hashing algorithm." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -90,6 +95,7 @@ tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -127,6 +133,7 @@ tests = ["pytest"] name = "arrow" version = "1.2.3" description = "Better dates & times for Python" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -141,6 +148,7 @@ python-dateutil = ">=2.7.0" name = "art" version = "6.0" description = "ASCII Art Library For Python" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -155,6 +163,7 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture name = "astroid" version = "2.15.6" description = "An abstract syntax tree for Python with inference support." 
+category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -174,6 +183,7 @@ wrapt = [ name = "asttokens" version = "2.2.1" description = "Annotate AST trees with source code positions" +category = "dev" optional = false python-versions = "*" files = [ @@ -191,6 +201,7 @@ test = ["astroid", "pytest"] name = "async-lru" version = "2.0.4" description = "Simple LRU cache for asyncio" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -205,6 +216,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -223,6 +235,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "babel" version = "2.12.1" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -237,6 +250,7 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} name = "backcall" version = "0.2.0" description = "Specifications for callback functions passed in to an API" +category = "dev" optional = false python-versions = "*" files = [ @@ -248,6 +262,7 @@ files = [ name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -282,6 +297,7 @@ typecheck = ["mypy"] name = "beautifulsoup4" version = "4.12.2" description = "Screen-scraping library" +category = "dev" optional = false python-versions = ">=3.6.0" files = [ @@ -300,6 +316,7 @@ lxml = ["lxml"] name = "black" version = "23.7.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -346,6 +363,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "bleach" version = "6.0.0" description = "An easy safelist-based HTML-sanitizing tool." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -364,6 +382,7 @@ css = ["tinycss2 (>=1.1.0,<1.2)"] name = "blinker" version = "1.6.2" description = "Fast, simple object-to-object and broadcast signaling" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -375,6 +394,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -386,6 +406,7 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." +category = "dev" optional = false python-versions = "*" files = [ @@ -462,6 +483,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -546,6 +568,7 @@ files = [ name = "click" version = "8.1.6" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -560,6 +583,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -571,6 +595,7 @@ files = [ name = "comm" version = "0.1.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -590,6 +615,7 @@ typing = ["mypy (>=0.990)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -665,6 +691,7 @@ toml = ["tomli"] name = "coveragespace" version = "6.0.2" description = "A place to track your code coverage metrics." +category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -683,6 +710,7 @@ requests = ">=2.28,<3.0" name = "debugpy" version = "1.6.7" description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -710,6 +738,7 @@ files = [ name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -721,6 +750,7 @@ files = [ name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -732,6 +762,7 @@ files = [ name = "dicttoxml" version = "1.7.16" description = "Converts a Python dictionary or other native data type into a valid XML string." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -743,6 +774,7 @@ files = [ name = "dill" version = "0.3.7" description = "serialize all of Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -757,6 +789,7 @@ graph = ["objgraph (>=1.7.2)"] name = "dnspython" version = "2.4.2" description = "DNS toolkit" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -776,6 +809,7 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" +category = "dev" optional = false python-versions = "*" files = [ @@ -786,6 +820,7 @@ files = [ name = "email-validator" version = "2.0.0.post2" description = "A robust email address syntax and deliverability validation library." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -801,6 +836,7 @@ idna = ">=2.0.0" name = "exceptiongroup" version = "1.1.2" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -815,6 +851,7 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" +category = "dev" optional = false python-versions = "*" files = [ @@ -829,6 +866,7 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "faker" version = "18.13.0" description = "Faker is a Python package that generates fake data for you." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -843,6 +881,7 @@ python-dateutil = ">=2.4" name = "fastjsonschema" version = "2.18.0" description = "Fastest Python implementation of JSON schema" +category = "dev" optional = false python-versions = "*" files = [ @@ -857,6 +896,7 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -873,6 +913,7 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.2" description = "A simple framework for building complex web applications." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -896,6 +937,7 @@ dotenv = ["python-dotenv"] name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." +category = "main" optional = false python-versions = "*" files = [ @@ -911,6 +953,7 @@ Flask = "*" name = "flask-cors" version = "4.0.0" description = "A Flask extension adding a decorator for CORS support" +category = "main" optional = false python-versions = "*" files = [ @@ -925,6 +968,7 @@ Flask = ">=0.9" name = "flask-restx" version = "1.1.0" description = "Fully featured framework for fast, easy and documented API development with Flask" +category = "main" optional = false python-versions = "*" files = [ @@ -948,6 +992,7 @@ test = ["Faker (==2.0.0)", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pyt name = "flask-sqlalchemy" version = "3.0.5" description = "Add SQLAlchemy support to your Flask application." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -963,6 +1008,7 @@ sqlalchemy = ">=1.4.18" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" files = [ @@ -974,6 +1020,7 @@ files = [ name = "freezegun" version = "1.2.2" description = "Let your Python tests travel through time" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -988,6 +1035,7 @@ python-dateutil = ">=2.7" name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." +category = "dev" optional = false python-versions = "*" files = [ @@ -1005,6 +1053,7 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "greenlet" version = "2.0.2" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" files = [ @@ -1078,6 +1127,7 @@ test = ["objgraph", "psutil"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1089,6 +1139,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1108,6 +1159,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.0.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1126,6 +1178,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false 
python-versions = ">=3.7" files = [ @@ -1137,6 +1190,7 @@ files = [ name = "ipykernel" version = "6.25.0" description = "IPython Kernel for Jupyter" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1150,7 +1204,7 @@ comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" @@ -1170,6 +1224,7 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" name = "ipython" version = "8.12.2" description = "IPython: Productive Interactive Computing" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1209,6 +1264,7 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa name = "ipython-genutils" version = "0.2.0" description = "Vestigial utilities from IPython" +category = "dev" optional = false python-versions = "*" files = [ @@ -1220,6 +1276,7 @@ files = [ name = "ipywidgets" version = "8.1.0" description = "Jupyter interactive widgets" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1241,6 +1298,7 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1255,6 +1313,7 @@ arrow = ">=0.15.0" name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1272,6 +1331,7 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] name = "itsdangerous" version = "2.1.2" description = "Safely pass data to untrusted environments and back." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1283,6 +1343,7 @@ files = [ name = "jedi" version = "0.19.0" description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1302,6 +1363,7 @@ testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1319,6 +1381,7 @@ i18n = ["Babel (>=2.7)"] name = "json5" version = "0.9.14" description = "A Python implementation of the JSON5 data format." +category = "dev" optional = false python-versions = "*" files = [ @@ -1333,6 +1396,7 @@ dev = ["hypothesis"] name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -1344,6 +1408,7 @@ files = [ name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1375,6 +1440,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1390,6 +1456,7 @@ referencing = ">=0.28.0" name = "jupyter" version = "1.0.0" description = "Jupyter metapackage. Install all the Jupyter components in one go." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1410,6 +1477,7 @@ qtconsole = "*" name = "jupyter-client" version = "8.3.0" description = "Jupyter protocol implementation and client libraries" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1419,7 +1487,7 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1433,6 +1501,7 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt name = "jupyter-console" version = "6.6.3" description = "Jupyter terminal console" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1444,7 +1513,7 @@ files = [ ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" prompt-toolkit = ">=3.0.30" pygments = "*" pyzmq = ">=17" @@ -1457,6 +1526,7 @@ test = ["flaky", "pexpect", "pytest"] name = "jupyter-core" version = "5.3.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1477,6 +1547,7 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] name = "jupyter-events" version = "0.7.0" description = "Jupyter Event System library" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1502,6 +1573,7 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p name = "jupyter-lsp" version = "2.2.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1517,6 +1589,7 @@ jupyter-server = ">=1.1.2" name = "jupyter-server" version = "2.7.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1529,7 +1602,7 @@ anyio = ">=3.1.0" argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" jupyter-events = ">=0.6.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" @@ -1553,6 +1626,7 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc name = "jupyter-server-terminals" version = "0.4.4" description = "A Jupyter Server Extension Providing Terminals." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1572,6 +1646,7 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", name = "jupyterlab" version = "4.0.3" description = "JupyterLab computational environment" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1605,6 +1680,7 @@ test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-cons name = "jupyterlab-pygments" version = "0.2.2" description = "Pygments theme using JupyterLab CSS variables" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1616,6 +1692,7 @@ files = [ name = "jupyterlab-server" version = "2.24.0" description = "A set of server components for JupyterLab and JupyterLab like applications." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1642,6 +1719,7 @@ test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-valida name = "jupyterlab-widgets" version = "3.0.8" description = "Jupyter interactive widgets for JupyterLab" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1653,6 +1731,7 @@ files = [ name = "lazy-object-proxy" version = "1.9.0" description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1698,6 +1777,7 @@ files = [ name = "mako" version = "1.3.0" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1717,6 +1797,7 @@ testing = ["pytest"] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1734,6 +1815,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1793,6 +1875,7 @@ files = [ name = "matplotlib-inline" version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1807,6 +1890,7 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1818,6 +1902,7 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1829,6 +1914,7 @@ files = [ name = "minilog" version = "2.2" description = "Minimalistic wrapper for Python logging." +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1840,6 +1926,7 @@ files = [ name = "mistune" version = "3.0.1" description = "A sane and fast Markdown parser with useful plugins and renderers" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1851,6 +1938,7 @@ files = [ name = "mkdocs" version = "1.3.1" description = "Project documentation with Markdown." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1877,6 +1965,7 @@ i18n = ["babel (>=2.9.0)"] name = "mypy" version = "1.4.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1923,6 +2012,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1934,6 +2024,7 @@ files = [ name = "nbclient" version = "0.8.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1943,7 +2034,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -1956,6 +2047,7 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= name = "nbconvert" version = "7.7.3" description = "Converting Jupyter Notebooks" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1994,6 +2086,7 @@ webpdf = ["playwright"] name = "nbformat" version = "5.9.2" description = "The Jupyter Notebook format" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2015,6 +2108,7 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.5.7" description = "Patch asyncio to allow nested event loops" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2026,6 +2120,7 @@ files = [ name = "notebook" version = "7.0.1" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2050,6 +2145,7 @@ test = ["ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[tes name = "notebook-shim" version = "0.2.3" description = "A shim layer for notebook traits and config" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2067,6 +2163,7 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "overrides" version = "7.3.1" description = "A decorator to automatically detect mismatch when overriding a method." 
+category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2078,6 +2175,7 @@ files = [ name = "packaging" version = "23.1" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2089,6 +2187,7 @@ files = [ name = "pandocfilters" version = "1.5.0" description = "Utilities for writing pandoc filters in python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2100,6 +2199,7 @@ files = [ name = "parso" version = "0.8.3" description = "A Python Parser" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2115,6 +2215,7 @@ testing = ["docopt", "pytest (<6.0.0)"] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2126,6 +2227,7 @@ files = [ name = "pathspec" version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2137,6 +2239,7 @@ files = [ name = "pexpect" version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." +category = "dev" optional = false python-versions = "*" files = [ @@ -2151,6 +2254,7 @@ ptyprocess = ">=0.5" name = "pickleshare" version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" optional = false python-versions = "*" files = [ @@ -2162,6 +2266,7 @@ files = [ name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2173,6 +2278,7 @@ files = [ name = "platformdirs" version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2188,6 +2294,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2203,6 +2310,7 @@ testing = ["pytest", "pytest-benchmark"] name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2221,6 +2329,7 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2235,6 +2344,7 @@ twisted = ["twisted"] name = "prompt-toolkit" version = "3.0.39" description = "Library for building powerful interactive command lines in Python" +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -2249,6 +2359,7 @@ wcwidth = "*" name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2275,6 +2386,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "psycopg2" version = "2.9.6" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2297,6 +2409,7 @@ files = [ name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -2308,6 +2421,7 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "dev" optional = false python-versions = "*" files = [ @@ -2322,6 +2436,7 @@ tests = ["pytest"] name = "pycodestyle" version = "2.11.0" description = "Python style guide checker" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2333,6 +2448,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2344,6 +2460,7 @@ files = [ name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2361,6 +2478,7 @@ toml = ["tomli (>=1.2.3)"] name = "pyfairdatatools" version = "0.1.3" description = "Tools for AI-READI" +category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2383,6 +2501,7 @@ validators = ">=0.20.0,<0.21.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2394,6 +2513,7 @@ files = [ name = "pygments" version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2408,6 +2528,7 @@ plugins = ["importlib-metadata"] name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2425,6 +2546,7 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.5" description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.7.2" files = [ @@ -2454,6 +2576,7 @@ testutils = ["gitpython (>3)"] name = "pymdown-extensions" version = "10.1" description = "Extension pack for Python Markdown." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2469,6 +2592,7 @@ pyyaml = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2491,6 +2615,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2509,6 +2634,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-describe" version = "2.1.0" description = "Describe-style plugin for pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2523,6 +2649,7 @@ pytest = ">=4.6,<8" name = "pytest-expecter" version = "3.0" description = "Better testing with expecter and pytest." 
+category = "dev" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -2534,6 +2661,7 @@ files = [ name = "pytest-random" version = "0.02" description = "py.test plugin to randomize tests" +category = "dev" optional = false python-versions = "*" files = [ @@ -2547,6 +2675,7 @@ pytest = ">=2.2.3" name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2561,6 +2690,7 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2575,6 +2705,7 @@ cli = ["click (>=5.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2586,6 +2717,7 @@ files = [ name = "pytz" version = "2023.3" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -2597,6 +2729,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "dev" optional = false python-versions = "*" files = [ @@ -2620,6 +2753,7 @@ files = [ name = "pywinpty" version = "2.0.11" description = "Pseudo terminal support for Windows from Python." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2634,6 +2768,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2642,7 +2777,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2650,15 +2784,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2675,7 +2802,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2683,7 +2809,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2693,6 +2818,7 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. 
" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2707,6 +2833,7 @@ pyyaml = "*" name = "pyzmq" version = "25.1.0" description = "Python bindings for 0MQ" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2796,6 +2923,7 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} name = "qtconsole" version = "5.4.3" description = "Jupyter Qt console" +category = "dev" optional = false python-versions = ">= 3.7" files = [ @@ -2822,6 +2950,7 @@ test = ["flaky", "pytest", "pytest-qt"] name = "qtpy" version = "2.3.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2839,6 +2968,7 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] name = "referencing" version = "0.30.0" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2854,6 +2984,7 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2875,6 +3006,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2889,6 +3021,7 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -2900,6 +3033,7 @@ files = [ name = "rpds-py" version = "0.9.2" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3006,6 +3140,7 @@ files = [ name = "send2trash" version = "1.8.2" description = "Send file to trash natively under Mac OS X, Windows and Linux" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3022,6 +3157,7 @@ win32 = ["pywin32"] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3033,6 +3169,7 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3044,6 +3181,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" optional = false python-versions = "*" files = [ @@ -3055,6 +3193,7 @@ files = [ name = "soupsieve" version = "2.4.1" description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3066,6 +3205,7 @@ files = [ name = "sqlalchemy" version = "2.0.19" description = "Database Abstraction Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3144,6 +3284,7 @@ sqlcipher = ["sqlcipher3-binary"] name = "stack-data" version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" optional = false python-versions = "*" files = [ @@ -3163,6 +3304,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "terminado" version = "0.17.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3183,6 +3325,7 @@ test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] name = "tinycss2" version = "1.2.1" description = "A tiny CSS parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3201,6 +3344,7 @@ test = ["flake8", "isort", "pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3212,6 +3356,7 @@ files = [ name = "tomlkit" version = "0.12.1" description = "Style preserving TOML library" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3223,6 +3368,7 @@ files = [ name = "tornado" version = "6.3.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "dev" optional = false python-versions = ">= 3.8" files = [ @@ -3243,6 +3389,7 @@ files = [ name = "traitlets" version = "5.9.0" description = "Traitlets Python configuration system" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3258,6 +3405,7 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] name = "types-python-dateutil" version = "2.8.19.14" description = "Typing stubs for python-dateutil" +category = "dev" optional = false python-versions = "*" files = [ @@ -3269,6 +3417,7 @@ files = [ name = "types-requests" version = "2.31.0.2" description = "Typing stubs for requests" +category = "main" optional = false python-versions = "*" files = [ @@ -3283,6 +3432,7 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" +category = "main" optional = false python-versions = "*" files = [ @@ -3290,10 +3440,23 @@ files = [ {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, ] +[[package]] +name = "types-waitress" +version = "2.1.4.9" +description = "Typing stubs for waitress" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "types-waitress-2.1.4.9.tar.gz", hash = "sha256:7105eb78110c0c123c9bed7c8eb41c040490d6b0d019bd53add5f9406bba10f1"}, + {file = "types_waitress-2.1.4.9-py3-none-any.whl", hash = "sha256:ecc432cad266d1f434008bd790eb63554808a60133a772331f67454a4960255c"}, +] + [[package]] name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3305,6 +3468,7 @@ files = [ name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3319,6 +3483,7 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", 
"flake8-bugbear", "flake name = "urllib3" version = "1.26.16" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -3335,6 +3500,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." +category = "main" optional = false python-versions = ">=3.4" files = [ @@ -3347,10 +3513,27 @@ decorator = ">=3.4.0" [package.extras] test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] +[[package]] +name = "waitress" +version = "2.1.2" +description = "Waitress WSGI server" +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "waitress-2.1.2-py3-none-any.whl", hash = "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a"}, + {file = "waitress-2.1.2.tar.gz", hash = "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba"}, +] + +[package.extras] +docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] +testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] + [[package]] name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3390,6 +3573,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wcwidth" version = "0.2.6" description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -3401,6 +3585,7 @@ files = [ name = "webcolors" version = "1.13" description = "A library for working with the color formats defined by HTML and CSS." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3416,6 +3601,7 @@ tests = ["pytest", "pytest-cov"] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" +category = "dev" optional = false python-versions = "*" files = [ @@ -3427,6 +3613,7 @@ files = [ name = "websocket-client" version = "1.6.1" description = "WebSocket client for Python with low level API options" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3443,6 +3630,7 @@ test = ["websockets"] name = "werkzeug" version = "2.3.6" description = "The comprehensive WSGI web application library." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3460,6 +3648,7 @@ watchdog = ["watchdog (>=2.3)"] name = "widgetsnbextension" version = "4.0.8" description = "Jupyter interactive widgets for Jupyter Notebook" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3471,6 +3660,7 @@ files = [ name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
+category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -3555,6 +3745,7 @@ files = [ name = "zipp" version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3569,4 +3760,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "6baf6175b51ec48a4653b31437eb4d02ff6385d8f973566192dbd31cb9c8c586" +content-hash = "f97f70aa5cac739e2d0e64da60bf877a54fc2571f2bf05bf39dad0dafa211c7d" diff --git a/pyproject.toml b/pyproject.toml index e22f7d41..1217a0ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,6 +54,7 @@ flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" email-validator = "^2.0.0.post2" alembic = "^1.12.1" +waitress = "^2.1.2" [tool.poetry.group.dev.dependencies] @@ -92,6 +93,7 @@ jupyter = "^1.0.0" # Types types-python-dateutil = "^2.8.19.14" +types-waitress = "^2.1.4.9" # Environment From 7fe152fdc2af836fc533bb05269bce4cd25a7b11 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 14 Nov 2023 17:57:08 -0800 Subject: [PATCH 369/505] =?UTF-8?q?=20=F0=9F=92=9A=20fix:=20update=20cors?= =?UTF-8?q?=20for=20local=20development=20(#28)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✨ feat: use waitress for server * 🚩 chore: add a azure check for schema destroy * style: 🎨 fix code style issues with Black * 🛂 fix: update cors allowed origins * style: 🎨 fix code style issues with Black * 💚 fix: update cors for local development --------- Co-authored-by: Lint Action --- app.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/app.py b/app.py index 88aa7667..ce1ee431 100644 --- a/app.py +++ b/app.py @@ -69,6 +69,9 @@ def create_app(config_module=None): "https://fairhub.io", ] + if app.debug: + 
cors_origins.append("http://localhost:3000") + # Only allow CORS origin for localhost:3000 # and any subdomain of azurestaticapps.net/ CORS( @@ -206,7 +209,7 @@ def validation_exception_handler(error): @app.cli.command("destroy-schema") def destroy_schema(): - """Create the database schema.""" + """destroy the database schema.""" # if db is azure, then skip if config.FAIRHUB_DATABASE_URL.find("azure") > -1: @@ -223,7 +226,8 @@ def destroy_schema(): metadata.reflect(bind=engine) table_names = [table.name for table in metadata.tables.values()] - if not table_names: + # The alembic table is created by default, so we need to check for more than 1 table + if len(table_names) <= 1: with engine.begin(): model.db.create_all() return app From 7093fd06bae359b3f05668434ff365cf91107de7 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 14 Nov 2023 20:10:58 -0800 Subject: [PATCH 370/505] =?UTF-8?q?=E2=AC=86=EF=B8=8F=20chore:=20cleaning?= =?UTF-8?q?=20up=20poe=20errors?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 3 + modules/etl/config/aireadi_config.py | 699 ++++++++++++--------- modules/etl/transforms/module_transform.py | 72 ++- modules/etl/vtypes/categorical.py | 8 +- modules/etl/vtypes/compound.py | 7 +- modules/etl/vtypes/continuous.py | 8 +- modules/etl/vtypes/discrete.py | 8 +- modules/etl/vtypes/mixed.py | 6 +- modules/etl/vtypes/timeseries.py | 12 +- modules/etl/vtypes/vtype.py | 2 +- 10 files changed, 470 insertions(+), 355 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index c9a3e92e..f5be63fa 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -308,6 +308,9 @@ def put(self, study_id: int): f"dashboard dashboard_modules is required to connect a dashboard: {data['dashboard_name']}", 400, ) + # Clear Redis Cache + # TODO: We want to clear the cache by dashboard_id, not the whole cache! 
+ cache.clear() update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( data["dashboard_id"] ) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 655fd249..7dcb6cd8 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -1,4 +1,5 @@ import numpy as np +from datetime import datetime # Load API metadata from .env # dotenv.load_dotenv() @@ -115,7 +116,7 @@ redcapTransformConfig = { "reports": [ ( - "dashboard_data_generic", + "participant_values", {"report_id": 242544}, [ ("remap_values_by_columns", {"columns": data_columns}), @@ -124,7 +125,7 @@ ], ), ( - "dashboard_data_overview", + "instrument_statuses", {"report_id": 251954}, [ ( @@ -136,7 +137,7 @@ ], ), ( - "dashboard_data_repeat_instruments", + "repeat_instruments", {"report_id": 259920}, [ ("drop_rows", {"columns": repeat_survey_columns}), @@ -152,11 +153,11 @@ ), ], "merge_transformed_reports": ( - "dashboard_data_generic", + "participant_values", [ - ("dashboard_data_overview", {"on": index_columns, "how": "inner"}), + ("instrument_statuses", {"on": index_columns, "how": "inner"}), ( - "dashboard_data_repeat_instruments", + "repeat_instruments", {"on": index_columns, "how": "outer"}, ), ], @@ -176,227 +177,6 @@ # Visualization Transforms # -# Sex & Gender Counts by Site -sexGenderTransformConfig = ( - "simpleTransform", - { - "key": "sex-and-gender", - "strict": True, - "transforms": { - "name": "Sex & Gender", - "vtype": "DoubleCategorical", - "method": { - "groups": ["siteid", "scrsex", "genderid"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Gender", - "field": "genderid", - "missing_value": missing_value_generic, - "astype": str, - }, - 
"color": { - "name": "Gender", - "field": "genderid", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - }, -) - -# Race & Ethnicity Counts by Site -raceEthnicityTransformConfig = ( - "simpleTransform", - { - "key": "race-and-ethnicity", - "strict": True, - "transforms": { - "name": "Race & Ethnicity", - "vtype": "DoubleCategorical", - "method": { - "groups": ["siteid", "race", "ethnic"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Ethnicity", - "field": "ethnic", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Ethnicity", - "field": "ethnic", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - }, -) - -# Phenotypes -phenotypesTransformConfig = ( - "compoundTransform", - { - "key": "phenotype", - "strict": True, - "transforms": [ - { - "name": "Prediabetes", - "vtype": "SingleCategorical", - "method": { - "groups": ["siteid", "mhterm_predm"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Prediabetes", - "field": "mhterm_predm", - "remap": lambda x: "Yes Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Prediabetes", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Prediabetes", - "field": "mhterm_predm", - "remap": lambda x: 
"Yes Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Prediabetes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Type I Diabetes", - "vtype": "SingleCategorical", - "method": { - "groups": ["siteid", "mhterm_dm1"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Type I Diabetes", - "field": "mhterm_dm1", - "remap": lambda x: "Yes Type I Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Type I Diabetes", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Type I Diabetes", - "field": "mhterm_dm1", - "remap": lambda x: "Yes Type I Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Type I Diabetes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Type II Diabetes", - "vtype": "SingleCategorical", - "method": { - "groups": ["siteid", "mhterm_dm2"], - "value": "record_id", - "func": "count", - }, - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Type II Diabetes", - "field": "mhterm_dm2", - "remap": lambda x: "Yes Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No Type II Diabetes", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Type II Diabetes", - "field": "mhterm_dm2", - "remap": lambda x: "Yes Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) 
== "Yes" else "No Type II Diabetes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - } - ], - }, -) - # Overview overviewTransformConfig = ( "compoundTransform", @@ -407,11 +187,11 @@ { "name": "Recruitment Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "recruitment_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -449,11 +229,11 @@ { "name": "FAQ Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "faq_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -491,11 +271,11 @@ { "name": "Screening Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "screening_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -533,11 +313,11 @@ { "name": "Preconsent Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "preconsent_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -575,11 +355,11 @@ { "name": "Consent Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "consent_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -617,11 +397,11 @@ { "name": "Staff Consent Attestation Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "staff_consent_attestation_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -659,11 +439,11 @@ { "name": "Demographics Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ 
"groups": ["siteid", "demographics_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -701,11 +481,11 @@ { "name": "Health Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "health_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -743,11 +523,11 @@ { "name": "Substance Use Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "substance_use_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -785,11 +565,11 @@ { "name": "CES-D-10 Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "cesd10_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -827,11 +607,11 @@ { "name": "PAID-5 DM Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "paid5_dm_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -869,11 +649,11 @@ { "name": "Diabetes Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "diabetes_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -911,11 +691,11 @@ { "name": "Dietary Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "dietary_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -953,11 +733,11 @@ { "name": "Opthalmic Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "ophthalmic_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -995,11 +775,11 @@ { "name": "PhenX SDOH Combined 
Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "px_sdoh_combined_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1037,11 +817,11 @@ { "name": "PhenX Food Insecurity Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "px_food_insecurity_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1079,11 +859,11 @@ { "name": "PhenX Neighborhood Environment Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "px_neighborhood_environment_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1121,14 +901,14 @@ { "name": "PhenX Racial and Ethnic Discrimination Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": [ "siteid", "px_racial_ethnic_discrimination_survey_complete", ], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1166,11 +946,11 @@ { "name": "Decline Participation Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "decline_participation_survey_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1208,11 +988,11 @@ { "name": "Study Enrollment Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "study_enrollment_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1250,11 +1030,11 @@ { "name": "Driving Record", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "driving_record_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1292,11 +1072,11 @@ { "name": "Device Distribution", "vtype": 
"DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "device_distribution_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1334,11 +1114,11 @@ { "name": "Medications Assessment", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "meds_assessment_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1376,11 +1156,11 @@ { "name": "Physical Assessment", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "physical_assessment_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1418,11 +1198,11 @@ { "name": "BCVA", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "bcva_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1460,11 +1240,11 @@ { "name": "Photopic MARS", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "photopic_mars_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1502,11 +1282,11 @@ { "name": "Mesopic MARS", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "mesopic_mars_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1544,11 +1324,11 @@ { "name": "Monofilament", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "monofilament_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1586,11 +1366,11 @@ { "name": "MOCA", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "moca_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1628,11 +1408,11 @@ { "name": "ECG 
Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "ecg_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1670,11 +1450,11 @@ { "name": "Lab Results Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "lab_results_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1712,11 +1492,11 @@ { "name": "Specimen Management", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "specimen_management_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1754,11 +1534,11 @@ { "name": "Device Return", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "device_return_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1796,11 +1576,11 @@ { "name": "Disposition Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "disposition_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1838,11 +1618,11 @@ { "name": "Data Management Survey", "vtype": "DoubleCategorical", - "method": { + "methods": [{ "groups": ["siteid", "data_management_complete"], "value": "record_id", "func": "count", - }, + }], "accessors": { "filterby": { "name": "Site", @@ -1880,10 +1660,331 @@ ], }, ) + +# Sex & Gender Counts by Site +recruitmentTransformConfig = ( + "simpleTransform", + { + "key": "recruitment", + "strict": True, + "transforms": { + "name": "Recruitment", + "vtype": "DoubleDiscreteTimeseries", + "methods": [{ + "groups": ["siteid", "scrcmpdat", "race"], + "value": "scrcmpdat", + "func": "count", + }], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "datetime": { + 
"name": "Date", + "field": "scrcmpdat", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "scrcmpdat", + "missing_value": missing_value_generic, + "remap": lambda x: int(datetime.fromisoformat(x["record"]["scrcmpdat"]).strftime("%Y%m%d")), + # key, accessors, name, record + "astype": int, + }, + }, + }, + }, +) + +# Sex & Gender Counts by Site +sexGenderTransformConfig = ( + "simpleTransform", + { + "key": "sex-gender", + "strict": True, + "transforms": { + "name": "Sex & Gender", + "vtype": "DoubleCategorical", + "methods": [{ + "groups": ["siteid", "scrsex", "genderid"], + "value": "record_id", + "func": "count", + }], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Gender", + "field": "genderid", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Gender", + "field": "genderid", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + }, +) + +# Race & Ethnicity Counts by Site +raceEthnicityTransformConfig = ( + "simpleTransform", + { + "key": "race-ethnicity", + "strict": True, + "transforms": { + "name": "Race & Ethnicity", + "vtype": "DoubleCategorical", + "methods": [{ + "groups": ["siteid", "race", "ethnic"], + "value": "record_id", + "func": "count", + }], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + 
"astype": str, + }, + "group": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Ethnicity", + "field": "ethnic", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Ethnicity", + "field": "ethnic", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + }, +) + +# Phenotypes +phenotypesTransformConfig = ( + "compoundTransform", + { + "key": "phenotype", + "strict": True, + "transforms": [ + { + "name": "Prediabetes", + "vtype": "SingleCategorical", + "methods": [{ + "groups": ["siteid", "mhterm_predm"], + "value": "record_id", + "func": "count", + }], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Prediabetes", + "field": "mhterm_predm", + "remap": lambda x: "Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Prediabetes", + "field": "mhterm_predm", + "remap": lambda x: "Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Type I Diabetes", + "vtype": "SingleCategorical", + "methods": [{ + "groups": ["siteid", "mhterm_dm1"], + "value": "record_id", + "func": "count", + }], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Type I Diabetes", + "field": "mhterm_dm1", + "remap": lambda x: "Type I Diabetes" if 
str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Type I Diabetes", + "field": "mhterm_dm1", + "remap": lambda x: "Type I Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Type II Diabetes", + "vtype": "SingleCategorical", + "methods": [{ + "groups": ["siteid", "mhterm_dm2"], + "value": "record_id", + "func": "count", + }], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Type II Diabetes", + "field": "mhterm_dm2", + "remap": lambda x: "Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Type II Diabetes", + "field": "mhterm_dm2", + "remap": lambda x: "Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + } + ], + }, +) + +currentMedicationsTransformConfig = ( + "simpleTransform", + { + "key": "current-medications", + "strict": True, + "transforms": { + "name": "Current Medications", + "vtype": "SingleCategorical", + "methods": [{ + "groups": ["siteid", "current_medications_complete"], + "value": "record_id", + "func": "count", + }], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Current Medications Status", + 
"field": "current_medications_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "remap": lambda x: x["name"], + "name": "Current Medications Status", + "field": "current_medications_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Current Medications (N)", + "field": "current_medications", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + }, +) + + transformConfigs = { "redcap": redcapTransformConfig, "overview": overviewTransformConfig, + "recruitment": recruitmentTransformConfig, + "race-ethnicity": raceEthnicityTransformConfig, + "sex-gender": sexGenderTransformConfig, "phenotypes": phenotypesTransformConfig, - "devices": phenotypesTransformConfig, - "recruitment": phenotypesTransformConfig, + "current-medications": currentMedicationsTransformConfig, } diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index ff59b1bb..4bcfc757 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -60,11 +60,17 @@ def __init__( raise ValueError( f"ModuleTransform instantiation missing transforms argument" ) + elif (type(self.transforms) != list) and (type(self.transforms) != dict): + self.valid = False + raise ValueError( + f"ModuleTransform argument transforms must be a list or dict type" + ) + else: + # Transform attribute is there and has one of the correct types (list, dict) + pass - # Normalize Transforms List Type, Check Validity, and Warn on Missing Attributes - self.transformList = ( - self.transforms if (type(self.transforms) == list) else [self.transforms] - ) + # Normalize Transforms to List Type, Check Validity, and Warn on Missing Attributes + self.transformList = self.transforms if type(self.transforms) == list else [self.transforms] for transform in enumerate(self.transformList): self.valid = True if self._transformIsValid(transform) else False if 
self.strict and not self.valid: @@ -92,9 +98,9 @@ def _transformIsValid(self, transform: Tuple[int, Dict[str, Any]]) -> bool: f"{self.key}:Transform at index {index} in transforms list missing vtype property" ) valid = False - if "method" not in transform: + if "methods" not in transform: self.logger.error( - f"{self.key}:Transform at index {index} in transforms list missing method property" + f"{self.key}:Transform at index {index} in transforms list missing methods property" ) valid = False if "accessors" not in transform: @@ -110,7 +116,7 @@ def _setValueType( name: str, record: Dict[str, Any], key: str, - accessors: List[Dict[str, Dict[str, str | Callable]]], + accessors: Dict[str, Dict[str, str|Callable]], ) -> Any: """ Element-wise type setting method. If value of @@ -118,6 +124,7 @@ def _setValueType( value as the type defined for property in the vtype. """ + print(accessors, "\n") accessor = accessors[key] for pname, _ptype in vtype.props: if pname == key: @@ -169,27 +176,27 @@ def simpleTransform(self, df: pd.DataFrame) -> object: One transform for one VType. 
""" - transform = self.transformList.pop() - name, _vtype, method, accessors = ( + self.transformed = [] + transform = self.transformList.pop() # simple transforms have only one transform object + name, vtype, methods, accessors = ( transform["name"], - transform["vtype"], - transform["method"], - transform["accessors"], + getattr(vtypes, transform["vtype"])(), + transform["methods"], + transform["accessors"] ) - vtype = getattr(vtypes, _vtype)() - - self.transformed = [] if vtype.isvalid(df, accessors): temp = df[ list(set(accessor["field"] for key, accessor in accessors.items())) ] - groups, value, func = method["groups"], method["value"], method["func"] - grouped = temp.groupby(groups, as_index=False) - transformed = getattr(grouped, func)() + for method in methods: + groups, value, func = method["groups"], method["value"], method["func"] + grouped = temp.groupby(groups, as_index=False) + temp = getattr(grouped, func)() + transformed = temp for record in transformed.to_dict("records"): record = { - key: self._setValueType(vtype, name, record, key, accessor) + key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() } record = {"name": name} | record @@ -220,22 +227,23 @@ def compoundTransform(self, df: pd.DataFrame) -> object: self.transformed = [] for transform in self.transformList: - name, vtype, method, accessors = ( + name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), - transform["method"], + transform["methods"], transform["accessors"] ) if vtype.isvalid(df, accessors): temp = df[ list(set(accessor["field"] for key, accessor in accessors.items())) ] - groups, value, func = method["groups"], method["value"], method["func"] - grouped = temp.groupby(groups, as_index=False) - transformed = getattr(grouped, func)() + for method in methods: + groups, value, func = method["groups"], method["value"], method["func"] + grouped = temp.groupby(groups, as_index=False) + temp = 
getattr(grouped, func)() + transformed = temp for record in transformed.to_dict("records"): - print(name, record, accessors, "\n") record = { key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() @@ -268,24 +276,26 @@ def mixedTransform(self, df: pd.DataFrame) -> object: self.transformed = {} for transform in self.transformList: - name, vtype, method, accessors = ( + name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), - transform["method"], + transform["methods"], transform["accessors"] ) if vtype.isvalid(df, accessors): temp = df[ list(set(accessor["field"] for key, accessor in accessors.items())) ] - groups, value, func = method["groups"], method["value"], method["func"] - grouped = temp.groupby(groups, as_index=False) - transformed = getattr(grouped, func)() + for method in methods: + groups, value, func = method["groups"], method["value"], method["func"] + grouped = temp.groupby(groups, as_index=False) + temp = getattr(grouped, func)() + transformed = temp subtransform = [] for record in transformed.to_dict("records"): record = { - key: self._setValueType(vtype, name, record, key, accessor) + key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() } record = {"name": name} | record diff --git a/modules/etl/vtypes/categorical.py b/modules/etl/vtypes/categorical.py index 1f5f7870..f414fa3d 100644 --- a/modules/etl/vtypes/categorical.py +++ b/modules/etl/vtypes/categorical.py @@ -2,8 +2,8 @@ class SingleCategorical(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(SingleCategorical, self).__init__( "SingleCategorical", [ ("filterby", str), @@ -16,8 +16,8 @@ def __init__(self: object) -> None: class DoubleCategorical(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(DoubleCategorical, self).__init__( "DoubleCategorical", [ 
("filterby", str), diff --git a/modules/etl/vtypes/compound.py b/modules/etl/vtypes/compound.py index 4ca25167..0e69b227 100644 --- a/modules/etl/vtypes/compound.py +++ b/modules/etl/vtypes/compound.py @@ -12,9 +12,9 @@ class Compound(VType): - def __init__(self: object) -> None: + def __init__(self) -> None: raise NotImplementedError - super().__init__( + super(Compound, self).__init__( "Compound", [ SingleCategorical, @@ -31,13 +31,14 @@ def __init__(self: object) -> None: ) def isvalid( - self: object, dfs: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + self, dfs: pd.DataFrame, accessors: List[Dict[str, Dict[str, str]]] ) -> bool: """ Extends the VType.isvalid method to operate on a list of pd.DataFrames and accessors. """ valid = True + accessorsList = [accessors] for accessors in accessorsList: if not super(Compound, self).isvalid(df, accessors): self.validation_errors.append( diff --git a/modules/etl/vtypes/continuous.py b/modules/etl/vtypes/continuous.py index ce80a011..1e1dfdbe 100644 --- a/modules/etl/vtypes/continuous.py +++ b/modules/etl/vtypes/continuous.py @@ -2,8 +2,8 @@ class SingleContinuous(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(SingleContinuous, self).__init__( "SingleContinuous", [("filterby", str), ("group", str), ("color", str), ("x", float)], float, @@ -11,8 +11,8 @@ def __init__(self: object) -> None: class DoubleContinuous(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(DoubleContinuous, self).__init__( "DoubleContinuous", [ ("filterby", str), diff --git a/modules/etl/vtypes/discrete.py b/modules/etl/vtypes/discrete.py index 429d81bc..d768dffa 100644 --- a/modules/etl/vtypes/discrete.py +++ b/modules/etl/vtypes/discrete.py @@ -2,8 +2,8 @@ class SingleDiscrete(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(SingleDiscrete, self).__init__( 
"SingleDiscrete", [("filterby", str), ("group", str), ("color", str), ("x", int)], int, @@ -11,8 +11,8 @@ def __init__(self: object) -> None: class DoubleDiscrete(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(DoubleDiscrete, self).__init__( "Discrete", [ ("filterby", str), diff --git a/modules/etl/vtypes/mixed.py b/modules/etl/vtypes/mixed.py index 35a1b842..e0e5969e 100644 --- a/modules/etl/vtypes/mixed.py +++ b/modules/etl/vtypes/mixed.py @@ -13,9 +13,9 @@ class Mixed(VType): - def __init__(self: object) -> None: + def __init__(self) -> None: raise NotImplementedError - super().__init__( + super(Mixed, self).__init__( "Mixed", [ SingleCategorical, @@ -33,7 +33,7 @@ def __init__(self: object) -> None: ) def isvalid( - self: object, dfs: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + self, dfs: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] ) -> bool: """ Extends the VType.isvalid method to operate on a list diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index 939db07b..6a8f3967 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -4,8 +4,8 @@ class SingleTimeseries(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(SingleTimeseries, self).__init__( "SingleTimeseries", [("filterby", str), ("subgroup", str), ("color", str), ("datetime", datetime)], pd._libs.tslibs.nattype.NaTType, @@ -13,8 +13,8 @@ def __init__(self: object) -> None: class DoubleDiscreteTimeseries(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + super(DoubleDiscreteTimeseries, self).__init__( "DoubleDiscreteTimeseries", [ ("filterby", str), @@ -28,8 +28,8 @@ def __init__(self: object) -> None: class DoubleContinuousTimeseries(VType): - def __init__(self: object) -> None: - super().__init__( + def __init__(self) -> None: + 
super(DoubleContinuousTimeseries, self).__init__( "DoubleContinuousTimeseries", [ ("filterby", str), diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index 2e3a1a9e..de097001 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -5,7 +5,7 @@ class VType(object): def __init__( - self: object, + self, name: str, props: List[Tuple[str, Callable]], missing_value: Callable, From 794bc996ca2fdab8430decda0a9db634a9811ee0 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 15 Nov 2023 04:11:22 +0000 Subject: [PATCH 371/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- caching/__init__.py | 12 +- config.py | 2 + modules/etl/config/aireadi_config.py | 548 ++++++++++++--------- modules/etl/transforms/module_transform.py | 44 +- modules/etl/transforms/redcap_transform.py | 12 +- modules/etl/vtypes/timeseries.py | 7 +- modules/etl/vtypes/vtype.py | 4 +- 7 files changed, 375 insertions(+), 254 deletions(-) diff --git a/caching/__init__.py b/caching/__init__.py index 671f11ba..fc85c7e6 100644 --- a/caching/__init__.py +++ b/caching/__init__.py @@ -1,8 +1,10 @@ from flask_caching import Cache from config import config -cache = Cache(config = { - key.replace(f"FAIRHUB_", ""): value - for key, value in config.items() - if "CACHE" in key -}) +cache = Cache( + config={ + key.replace(f"FAIRHUB_", ""): value + for key, value in config.items() + if "CACHE" in key + } +) diff --git a/config.py b/config.py index a58920c8..fa1efb78 100644 --- a/config.py +++ b/config.py @@ -10,10 +10,12 @@ # Load environment variables from .env config = dotenv_values(".env") + def get_env(key): """Return environment variable from .env or native environment.""" return config.get(key) if LOCAL_ENV_FILE else environ.get(key) + FLASK_APP = get_env("FLASK_APP") FLASK_DEBUG = get_env("FLASK_DEBUG") FAIRHUB_DATABASE_URL = 
get_env("FAIRHUB_DATABASE_URL") diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 7dcb6cd8..cbb668a4 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -187,11 +187,13 @@ { "name": "Recruitment Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "recruitment_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "recruitment_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -229,11 +231,13 @@ { "name": "FAQ Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "faq_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "faq_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -271,11 +275,13 @@ { "name": "Screening Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "screening_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "screening_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -313,11 +319,13 @@ { "name": "Preconsent Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "preconsent_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -355,11 +363,13 @@ { "name": "Consent Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "consent_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + 
"func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -397,11 +407,16 @@ { "name": "Staff Consent Attestation Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "staff_consent_attestation_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -439,11 +454,13 @@ { "name": "Demographics Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "demographics_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -481,11 +498,13 @@ { "name": "Health Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "health_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -523,11 +542,13 @@ { "name": "Substance Use Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "substance_use_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -565,11 +586,13 @@ { "name": "CES-D-10 Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "cesd10_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -607,11 +630,13 @@ 
{ "name": "PAID-5 DM Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "paid5_dm_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -649,11 +674,13 @@ { "name": "Diabetes Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "diabetes_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -691,11 +718,13 @@ { "name": "Dietary Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "dietary_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -733,11 +762,13 @@ { "name": "Opthalmic Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "ophthalmic_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -775,11 +806,13 @@ { "name": "PhenX SDOH Combined Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "px_sdoh_combined_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -817,11 +850,13 @@ { "name": "PhenX Food Insecurity Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", 
"px_food_insecurity_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -859,11 +894,16 @@ { "name": "PhenX Neighborhood Environment Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "px_neighborhood_environment_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": [ + "siteid", + "px_neighborhood_environment_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -901,14 +941,16 @@ { "name": "PhenX Racial and Ethnic Discrimination Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": [ - "siteid", - "px_racial_ethnic_discrimination_survey_complete", - ], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -946,11 +988,13 @@ { "name": "Decline Participation Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "decline_participation_survey_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "decline_participation_survey_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -988,11 +1032,13 @@ { "name": "Study Enrollment Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "study_enrollment_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "study_enrollment_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1030,11 +1076,13 @@ { "name": "Driving Record", 
"vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "driving_record_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "driving_record_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1072,11 +1120,13 @@ { "name": "Device Distribution", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "device_distribution_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1114,11 +1164,13 @@ { "name": "Medications Assessment", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "meds_assessment_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1156,11 +1208,13 @@ { "name": "Physical Assessment", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "physical_assessment_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "physical_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1198,11 +1252,13 @@ { "name": "BCVA", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "bcva_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1240,11 +1296,13 @@ { "name": "Photopic MARS", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "photopic_mars_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + 
"groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1282,11 +1340,13 @@ { "name": "Mesopic MARS", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "mesopic_mars_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "mesopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1324,11 +1384,13 @@ { "name": "Monofilament", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "monofilament_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "monofilament_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1366,11 +1428,13 @@ { "name": "MOCA", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "moca_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "moca_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1408,11 +1472,13 @@ { "name": "ECG Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "ecg_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "ecg_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1450,11 +1516,13 @@ { "name": "Lab Results Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "lab_results_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "lab_results_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1492,11 +1560,13 @@ { "name": "Specimen Management", "vtype": "DoubleCategorical", - "methods": 
[{ - "groups": ["siteid", "specimen_management_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "specimen_management_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1534,11 +1604,13 @@ { "name": "Device Return", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "device_return_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "device_return_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1576,11 +1648,13 @@ { "name": "Disposition Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "disposition_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "disposition_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1618,11 +1692,13 @@ { "name": "Data Management Survey", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "data_management_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "data_management_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1670,11 +1746,13 @@ "transforms": { "name": "Recruitment", "vtype": "DoubleDiscreteTimeseries", - "methods": [{ - "groups": ["siteid", "scrcmpdat", "race"], - "value": "scrcmpdat", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "scrcmpdat", "race"], + "value": "scrcmpdat", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1703,7 +1781,11 @@ "name": "Cumulative Count (N)", "field": "scrcmpdat", "missing_value": missing_value_generic, - "remap": lambda x: int(datetime.fromisoformat(x["record"]["scrcmpdat"]).strftime("%Y%m%d")), + "remap": lambda x: int( + 
datetime.fromisoformat(x["record"]["scrcmpdat"]).strftime( + "%Y%m%d" + ) + ), # key, accessors, name, record "astype": int, }, @@ -1721,11 +1803,13 @@ "transforms": { "name": "Sex & Gender", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "scrsex", "genderid"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "scrsex", "genderid"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1770,11 +1854,13 @@ "transforms": { "name": "Race & Ethnicity", "vtype": "DoubleCategorical", - "methods": [{ - "groups": ["siteid", "race", "ethnic"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "race", "ethnic"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1821,11 +1907,13 @@ { "name": "Prediabetes", "vtype": "SingleCategorical", - "methods": [{ - "groups": ["siteid", "mhterm_predm"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "mhterm_predm"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1836,14 +1924,18 @@ "group": { "name": "Prediabetes", "field": "mhterm_predm", - "remap": lambda x: "Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "remap": lambda x: "Prediabetes" + if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" + else "No", "missing_value": missing_value_generic, "astype": str, }, "color": { "name": "Prediabetes", "field": "mhterm_predm", - "remap": lambda x: "Prediabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "remap": lambda x: "Prediabetes" + if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" + else "No", "missing_value": missing_value_generic, "astype": str, }, @@ -1858,11 +1950,13 @@ { "name": "Type I Diabetes", "vtype": "SingleCategorical", - "methods": [{ 
- "groups": ["siteid", "mhterm_dm1"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "mhterm_dm1"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1873,14 +1967,18 @@ "group": { "name": "Type I Diabetes", "field": "mhterm_dm1", - "remap": lambda x: "Type I Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "remap": lambda x: "Type I Diabetes" + if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" + else "No", "missing_value": missing_value_generic, "astype": str, }, "color": { "name": "Type I Diabetes", "field": "mhterm_dm1", - "remap": lambda x: "Type I Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "remap": lambda x: "Type I Diabetes" + if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" + else "No", "missing_value": missing_value_generic, "astype": str, }, @@ -1895,11 +1993,13 @@ { "name": "Type II Diabetes", "vtype": "SingleCategorical", - "methods": [{ - "groups": ["siteid", "mhterm_dm2"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "mhterm_dm2"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", @@ -1910,14 +2010,18 @@ "group": { "name": "Type II Diabetes", "field": "mhterm_dm2", - "remap": lambda x: "Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "remap": lambda x: "Type II Diabetes" + if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" + else "No", "missing_value": missing_value_generic, "astype": str, }, "color": { "name": "Type II Diabetes", "field": "mhterm_dm2", - "remap": lambda x: "Type II Diabetes" if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" else "No", + "remap": lambda x: "Type II Diabetes" + if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" + else "No", "missing_value": 
missing_value_generic, "astype": str, }, @@ -1928,7 +2032,7 @@ "astype": int, }, }, - } + }, ], }, ) @@ -1941,11 +2045,13 @@ "transforms": { "name": "Current Medications", "vtype": "SingleCategorical", - "methods": [{ - "groups": ["siteid", "current_medications_complete"], - "value": "record_id", - "func": "count", - }], + "methods": [ + { + "groups": ["siteid", "current_medications_complete"], + "value": "record_id", + "func": "count", + } + ], "accessors": { "filterby": { "name": "Site", diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index 4bcfc757..d492d30d 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -70,7 +70,9 @@ def __init__( pass # Normalize Transforms to List Type, Check Validity, and Warn on Missing Attributes - self.transformList = self.transforms if type(self.transforms) == list else [self.transforms] + self.transformList = ( + self.transforms if type(self.transforms) == list else [self.transforms] + ) for transform in enumerate(self.transformList): self.valid = True if self._transformIsValid(transform) else False if self.strict and not self.valid: @@ -116,7 +118,7 @@ def _setValueType( name: str, record: Dict[str, Any], key: str, - accessors: Dict[str, Dict[str, str|Callable]], + accessors: Dict[str, Dict[str, str | Callable]], ) -> Any: """ Element-wise type setting method. 
If value of @@ -141,13 +143,15 @@ def _setValueType( # Accessor Name pvalue = record[accessor["field"]] if "remap" in accessor and accessor["remap"] is not None: - pvalue = accessor["remap"]({ - "name": name, - "record": record, - "value": pvalue, - "key": key, - "accessors": accessors, - }) + pvalue = accessor["remap"]( + { + "name": name, + "record": record, + "value": pvalue, + "key": key, + "accessors": accessors, + } + ) if pvalue != accessor["missing_value"]: try: pvalue = ptype(pvalue) @@ -177,12 +181,14 @@ def simpleTransform(self, df: pd.DataFrame) -> object: One transform for one VType. """ self.transformed = [] - transform = self.transformList.pop() # simple transforms have only one transform object + transform = ( + self.transformList.pop() + ) # simple transforms have only one transform object name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), transform["methods"], - transform["accessors"] + transform["accessors"], ) if vtype.isvalid(df, accessors): temp = df[ @@ -231,14 +237,18 @@ def compoundTransform(self, df: pd.DataFrame) -> object: transform["name"], getattr(vtypes, transform["vtype"])(), transform["methods"], - transform["accessors"] + transform["accessors"], ) if vtype.isvalid(df, accessors): temp = df[ list(set(accessor["field"] for key, accessor in accessors.items())) ] for method in methods: - groups, value, func = method["groups"], method["value"], method["func"] + groups, value, func = ( + method["groups"], + method["value"], + method["func"], + ) grouped = temp.groupby(groups, as_index=False) temp = getattr(grouped, func)() transformed = temp @@ -280,14 +290,18 @@ def mixedTransform(self, df: pd.DataFrame) -> object: transform["name"], getattr(vtypes, transform["vtype"])(), transform["methods"], - transform["accessors"] + transform["accessors"], ) if vtype.isvalid(df, accessors): temp = df[ list(set(accessor["field"] for key, accessor in accessors.items())) ] for method in methods: - groups, 
value, func = method["groups"], method["value"], method["func"] + groups, value, func = ( + method["groups"], + method["value"], + method["func"], + ) grouped = temp.groupby(groups, as_index=False) temp = getattr(grouped, func)() transformed = temp diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 93cf34d5..3c9e3cf6 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -273,9 +273,7 @@ def _drop_columns( return df @classmethod - def drop_columns( - self, df: pd.DataFrame, columns: List[str] - ) -> pd.DataFrame: + def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: """ Drop columns from pd.DataFrame. """ @@ -298,9 +296,7 @@ def _keep_columns( return df @classmethod - def keep_columns( - self, df: pd.DataFrame, columns: List[str] - ) -> pd.DataFrame: + def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: """ Keep only selected columns in pd.DataFrame. 
""" @@ -634,9 +630,7 @@ def _resolve_columns_with_dataframe( return resolved_columns # Extract REDCap Type Metadata - def _get_redcap_type_metadata( - self, df: pd.DataFrame - ) -> List[Dict[str, Any]]: + def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: """ Extracts REDCap field name, type, and options (the metadata) for each column in the target pd.DataFrame diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index 6a8f3967..ce587c95 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -7,7 +7,12 @@ class SingleTimeseries(VType): def __init__(self) -> None: super(SingleTimeseries, self).__init__( "SingleTimeseries", - [("filterby", str), ("subgroup", str), ("color", str), ("datetime", datetime)], + [ + ("filterby", str), + ("subgroup", str), + ("color", str), + ("datetime", datetime), + ], pd._libs.tslibs.nattype.NaTType, ) diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index de097001..bf419886 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -19,9 +19,7 @@ def __init__( def __str__(self): return f"{self.__dict__}" - def isvalid( - self, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]] - ) -> bool: + def isvalid(self, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]]) -> bool: columns = df.columns for pname, ptype in self.props: if pname in accessors.keys(): From 2bff4b295a5a9d17c586989ca4daec2b2d63a9b1 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 14 Nov 2023 21:28:31 -0800 Subject: [PATCH 372/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20etl=20config?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/config/aireadi_config.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 7dcb6cd8..b4642ea6 100644 --- a/modules/etl/config/aireadi_config.py 
+++ b/modules/etl/config/aireadi_config.py @@ -116,7 +116,7 @@ redcapTransformConfig = { "reports": [ ( - "participant_values", + "participant-value", {"report_id": 242544}, [ ("remap_values_by_columns", {"columns": data_columns}), @@ -125,7 +125,7 @@ ], ), ( - "instrument_statuses", + "instrument-status", {"report_id": 251954}, [ ( @@ -137,7 +137,7 @@ ], ), ( - "repeat_instruments", + "repeat_instrument", {"report_id": 259920}, [ ("drop_rows", {"columns": repeat_survey_columns}), @@ -153,11 +153,11 @@ ), ], "merge_transformed_reports": ( - "participant_values", + "participant-value", [ - ("instrument_statuses", {"on": index_columns, "how": "inner"}), + ("instrument-status", {"on": index_columns, "how": "inner"}), ( - "repeat_instruments", + "repeat_instrument", {"on": index_columns, "how": "outer"}, ), ], From 011d1a6dcf5283fc9f852a647243965e15bc3ec2 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Tue, 21 Nov 2023 15:28:45 -0800 Subject: [PATCH 373/505] =?UTF-8?q?feat:=20=E2=9C=A8=20=20add=20test=20fil?= =?UTF-8?q?e=20for=20version=20steps=20(#29)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add dataset version test * fix: confest.py password change * style: 🎨 fix code style issues with Black * fix: dataset & its metadata http request status code * fix: study metadata http statuses fixed * fix: study status test * style: 🎨 fix code style issues with Black * fix: password was reverted * fix: delete endpoint status * fix: delete endpoints * fix: study dataset test * fix: minor dataset typo fixes * fix: delete endpoints * fix: dataset relate item test functions * style: 🎨 fix code style issues with Black * fix: restored some study metadata fields * fix: study version minimised endpoint * style: format * style: 🎨 fix code style issues with Black --------- Co-authored-by: Lint Action --- apis/authentication.py | 1 - apis/contributor.py | 8 +- apis/dataset.py | 144 
+++--- apis/dataset_metadata/dataset_access.py | 4 +- .../dataset_alternate_identifier.py | 8 +- apis/dataset_metadata/dataset_consent.py | 4 +- apis/dataset_metadata/dataset_contributor.py | 17 +- apis/dataset_metadata/dataset_date.py | 8 +- .../dataset_de_ident_level.py | 4 +- apis/dataset_metadata/dataset_description.py | 8 +- apis/dataset_metadata/dataset_funder.py | 8 +- apis/dataset_metadata/dataset_other.py | 4 +- apis/dataset_metadata/dataset_record_keys.py | 4 +- apis/dataset_metadata/dataset_related_item.py | 19 +- apis/dataset_metadata/dataset_rights.py | 8 +- apis/dataset_metadata/dataset_subject.py | 8 +- apis/dataset_metadata/dataset_title.py | 10 +- apis/study.py | 12 +- apis/study_metadata/study_arm.py | 8 +- apis/study_metadata/study_available_ipd.py | 8 +- apis/study_metadata/study_contact.py | 8 +- apis/study_metadata/study_description.py | 4 +- apis/study_metadata/study_design.py | 4 +- apis/study_metadata/study_eligibility.py | 12 +- apis/study_metadata/study_identification.py | 8 +- apis/study_metadata/study_intervention.py | 8 +- apis/study_metadata/study_ipdsharing.py | 4 +- apis/study_metadata/study_link.py | 8 +- apis/study_metadata/study_location.py | 8 +- apis/study_metadata/study_other.py | 12 +- apis/study_metadata/study_overall_official.py | 8 +- apis/study_metadata/study_reference.py | 8 +- .../study_sponsors_collaborators.py | 8 +- apis/study_metadata/study_status.py | 4 +- model/dataset_metadata/dataset_contributor.py | 2 +- model/dataset_metadata/dataset_other.py | 1 - model/study_metadata/study_eligibility.py | 5 +- model/study_metadata/study_location.py | 1 - .../study_metadata/study_overall_official.py | 2 +- .../study_sponsors_collaborators.py | 2 +- tests/conftest.py | 5 +- tests/functional/test_study_api.py | 10 +- tests/functional/test_study_dataset_api.py | 11 +- .../test_study_dataset_metadata_api.py | 174 +++++-- tests/functional/test_study_metadata_api.py | 90 +++- tests/functional/test_study_version_api.py | 466 
++++++++++++++++++ 46 files changed, 896 insertions(+), 272 deletions(-) create mode 100644 tests/functional/test_study_version_api.py diff --git a/apis/authentication.py b/apis/authentication.py index eec8339b..3d9c9758 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -40,7 +40,6 @@ class UnauthenticatedException(Exception): """Exception raised when a user is not authenticated.""" - # TODO: Implement this exception pass diff --git a/apis/contributor.py b/apis/contributor.py index 8208f79f..b0062b1b 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,7 +1,7 @@ from collections import OrderedDict from typing import Any, Dict, List, Union -from flask import g, request +from flask import g, request, Response from flask_restx import Namespace, Resource, fields import model @@ -155,7 +155,7 @@ def delete(self, study_id: int, user_id: str): model.db.session.commit() - return 204 + return Response(status=204) user = model.User.query.get(user_id) @@ -177,7 +177,7 @@ def delete(self, study_id: int, user_id: str): return "you must transfer ownership before removing yourself", 422 model.db.session.delete(grantee) model.db.session.commit() - return 204 + return Response(status=204) if not is_granted("delete_contributor", study): return ( "Access denied, you are not authorized to change this permission", @@ -188,7 +188,7 @@ def delete(self, study_id: int, user_id: str): return f"User cannot delete {grantee.permission}", 403 model.db.session.delete(grantee) model.db.session.commit() - return 204 + return Response(status=204) @api.route("/study//contributor/owner/") diff --git a/apis/dataset.py b/apis/dataset.py index ff933fda..f828e482 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,6 +1,6 @@ import typing -from flask import jsonify, request +from flask import jsonify, request, Response from flask_restx import Namespace, Resource, fields import model @@ -47,7 +47,7 @@ class DatasetList(Resource): def get(self, study_id): study = 
model.Study.query.get(study_id) datasets = model.Dataset.query.filter_by(study=study) - return [d.to_dict() for d in datasets] + return [d.to_dict() for d in datasets], 200 @api.response(201, "Success") @api.response(400, "Validation Error") @@ -72,7 +72,7 @@ def post(self, study_id): description_element.description = data["description"] model.db.session.commit() - return dataset_.to_dict() + return dataset_.to_dict(), 201 # TODO not finalized endpoint. have to set functionality @@ -85,7 +85,7 @@ class DatasetResource(Resource): @api.response(400, "Validation Error") def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument data_obj = model.Dataset.query.get(dataset_id) - return data_obj.to_dict() + return data_obj.to_dict(), 200 @api.response(201, "Success") @api.response(400, "Validation Error") @@ -102,7 +102,7 @@ def put(self, study_id: int, dataset_id: int): data_obj.update(data) model.db.session.commit() - return data_obj.to_dict() + return data_obj.to_dict(), 200 @api.response(200, "Success") @api.response(400, "Validation Error") @@ -118,7 +118,7 @@ def delete(self, study_id: int, dataset_id: int): model.db.session.delete(data_obj) model.db.session.commit() - return 204 + return Response(status=204) @api.route("/study//dataset//version/") @@ -133,7 +133,7 @@ def get( if not is_granted("version", study): return "Access denied, you can not modify", 403 dataset_version = model.Version.query.get(version_id) - return dataset_version.to_dict() + return dataset_version.to_dict(), 200 @api.response(201, "Success") @api.response(400, "Validation Error") @@ -147,7 +147,7 @@ def put( data_version_obj = model.Version.query.get(version_id) data_version_obj.update(request.json) model.db.session.commit() - return jsonify(data_version_obj.to_dict()), 201 + return jsonify(data_version_obj.to_dict()), 200 @api.response(201, "Success") @api.response(400, "Validation Error") @@ -161,67 +161,7 @@ def delete( version_obj = 
model.Version.query.get(version_id) model.db.session.delete(version_obj) model.db.session.commit() - return 204 - - -@api.route("/study//dataset//version//changelog") -class VersionDatasetChangelog(Resource): - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version changelog") - def get(self, study_id: str, dataset_id: str, version_id: str): - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - version = model.Version.query.filter_by( - id=version_id, dataset_id=dataset_id - ).one_or_none() - return {"changelog": version.changelog} - - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version changelog update") - def put( - self, study_id: str, dataset_id: str, version_id: str - ): # pylint: disable= unused-argument - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - data: typing.Union[typing.Any, dict] = request.json - version_ = model.Version.query.get(version_id) - version_.changelog = data["changelog"] - model.db.session.commit() - return 201 - - -@api.route("/study//dataset//version//readme") -class VersionDatasetReadme(Resource): - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version readme") - def get(self, study_id: str, dataset_id: str, version_id: str): - study = model.Study.query.get(study_id) - if not is_granted("version", study): - return "Access denied, you can not modify", 403 - version = model.Version.query.filter_by( - id=version_id, dataset_id=dataset_id - ).one_or_none() - return version.version_readme.to_dict(), 200 - - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.doc("version readme update") - def put( - self, study_id: str, dataset_id: str, version_id: str - ): # pylint: disable= unused-argument - study = model.Study.query.get(study_id) - if not 
is_granted("version", study): - return "Access denied, you can not modify", 403 - data = request.json - version_ = model.Version.query.get(version_id) - version_.version_readme.update(data) - model.db.session.commit() - return 201 + return Response(status=204) @api.route("/study//dataset//version") @@ -252,7 +192,7 @@ def post(self, study_id: int, dataset_id: int): dataset_versions = model.Version.from_data(data_obj, data) model.db.session.add(dataset_versions) model.db.session.commit() - return dataset_versions.to_dict() + return dataset_versions.to_dict(), 201 # @api.route("/study//dataset//version//publish") @@ -282,7 +222,7 @@ def get(self, study_id: str, dataset_id: str, version_id: str): version = model.Version.query.filter_by( id=version_id, dataset_id=dataset_id ).one_or_none() - return version.dataset.study.to_dict_study_metadata() + return version.dataset.study.to_dict_study_metadata(), 200 @api.route( @@ -299,4 +239,64 @@ def get(self, study_id: str, dataset_id: str, version_id: str): version = model.Version.query.filter_by( id=version_id, dataset_id=dataset_id ).one_or_none() - return version.dataset.to_dict_dataset_metadata() + return version.dataset.to_dict_dataset_metadata(), 200 + + +@api.route("/study//dataset//version//changelog") +class VersionDatasetChangelog(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version changelog") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return {"changelog": version.changelog}, 200 + + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version changelog update") + def put( + self, study_id: str, dataset_id: str, version_id: str + ): # pylint: disable= unused-argument + study = 
model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + data: typing.Union[typing.Any, dict] = request.json + version_ = model.Version.query.get(version_id) + version_.changelog = data["changelog"] + model.db.session.commit() + return version_.changelog, 200 + + +@api.route("/study//dataset//version//readme") +class VersionDatasetReadme(Resource): + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version readme") + def get(self, study_id: str, dataset_id: str, version_id: str): + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + version = model.Version.query.filter_by( + id=version_id, dataset_id=dataset_id + ).one_or_none() + return version.version_readme.to_dict(), 200 + + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.doc("version readme update") + def put( + self, study_id: str, dataset_id: str, version_id: str + ): # pylint: disable= unused-argument + study = model.Study.query.get(study_id) + if not is_granted("version", study): + return "Access denied, you can not modify", 403 + data = request.json + version_ = model.Version.query.get(version_id) + version_.version_readme.update(data) + model.db.session.commit() + return version_.version_readme.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py index d9e79d1e..4bf52f39 100644 --- a/apis/dataset_metadata/dataset_access.py +++ b/apis/dataset_metadata/dataset_access.py @@ -32,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset access""" dataset_ = model.Dataset.query.get(dataset_id) dataset_access_ = dataset_.dataset_access - return dataset_access_.to_dict() + return dataset_access_.to_dict(), 200 @api.doc("update access") @api.response(200, "Success") @@ -69,4 +69,4 @@ def put(self, 
study_id: int, dataset_id: int): # pylint: disable= unused-argume dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_access.update(request.json) model.db.session.commit() - return dataset_.dataset_access.to_dict() + return dataset_.dataset_access.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 745be6c9..d2c6049e 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,7 +1,7 @@ """API endpoints for dataset alternate identifier""" from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -32,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argum """Get dataset alternate identifier""" dataset_ = model.Dataset.query.get(dataset_id) dataset_identifier_ = dataset_.dataset_alternate_identifier - return [d.to_dict() for d in dataset_identifier_] + return [d.to_dict() for d in dataset_identifier_], 200 @api.doc("update identifier") @api.response(200, "Success") @@ -110,7 +110,7 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_identifier_) list_of_elements.append(dataset_identifier_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route( "/study//dataset//" @@ -133,4 +133,4 @@ def delete( model.db.session.delete(dataset_identifier_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index 3487d772..a033273f 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -34,7 +34,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset consent""" dataset_ = 
model.Dataset.query.get(dataset_id) dataset_consent_ = dataset_.dataset_consent - return dataset_consent_.to_dict() + return dataset_consent_.to_dict(), 200 @api.doc("update consent") @api.response(200, "Success") @@ -80,4 +80,4 @@ def put(self, study_id: int, dataset_id: int): dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_consent.update(data) model.db.session.commit() - return dataset_.dataset_consent.to_dict() + return dataset_.dataset_consent.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py index 64394d6e..6e8e457c 100644 --- a/apis/dataset_metadata/dataset_contributor.py +++ b/apis/dataset_metadata/dataset_contributor.py @@ -1,7 +1,7 @@ """API for dataset contributor metadata""" from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource from jsonschema import ValidationError, validate @@ -28,7 +28,9 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume dataset_ = model.Dataset.query.get(dataset_id) dataset_contributor_ = dataset_.dataset_contributors - return [d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"]] + return [ + d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"] + ], 200 @api.doc("update contributor") @api.response(200, "Success") @@ -129,7 +131,7 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_contributor_) list_of_elements.append(dataset_contributor_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route( @@ -156,7 +158,7 @@ def delete( model.db.session.delete(contributor_) model.db.session.commit() - return 204 + return Response(status=204) @api.route("/study//dataset//metadata/creator") @@ -171,7 +173,8 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset creator""" dataset_ = 
model.Dataset.query.get(dataset_id) dataset_creator_ = dataset_.dataset_contributors - return [d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"]] + # TODO d.creator + return [d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"]], 200 @api.doc("update creator") @api.response(200, "Success") @@ -269,7 +272,7 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_creator_) list_of_elements.append(dataset_creator_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//dataset//metadata/creator/") @@ -291,4 +294,4 @@ def delete( model.db.session.delete(dataset_creator_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index d687f96c..11af423e 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,7 +1,7 @@ """APIs for dataset date metadata""" from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -32,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset date""" dataset_ = model.Dataset.query.get(dataset_id) dataset_date_ = dataset_.dataset_date - return [d.to_dict() for d in dataset_date_] + return [d.to_dict() for d in dataset_date_], 200 @api.doc("update date") @api.response(200, "Success") @@ -87,7 +87,7 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_date_) list_of_elements.append(dataset_date_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//dataset//metadata/date/") @@ -108,4 +108,4 @@ def delete( model.db.session.delete(date_) model.db.session.commit() - return 204 + return Response(status=204) diff --git 
a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py index bb58d8f1..a9f7c7f5 100644 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ b/apis/dataset_metadata/dataset_de_ident_level.py @@ -35,7 +35,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset de-identification level""" dataset_ = model.Dataset.query.get(dataset_id) de_ident_level_ = dataset_.dataset_de_ident_level - return de_ident_level_.to_dict() + return de_ident_level_.to_dict(), 200 @api.doc("update ident level") @api.response(200, "Success") @@ -81,4 +81,4 @@ def put(self, study_id: int, dataset_id: int): dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_de_ident_level.update(data) model.db.session.commit() - return dataset_.dataset_de_ident_level.to_dict() + return dataset_.dataset_de_ident_level.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 3f86f022..6e45d9be 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -2,7 +2,7 @@ from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -32,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset description""" dataset_ = model.Dataset.query.get(dataset_id) dataset_description_ = dataset_.dataset_description - return [d.to_dict() for d in dataset_description_] + return [d.to_dict() for d in dataset_description_], 200 @api.doc("update description") @api.response(200, "Success") @@ -100,7 +100,7 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_description_) list_of_elements.append(dataset_description_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 
@api.route( "/study//dataset//" @@ -134,4 +134,4 @@ def delete( model.db.session.delete(dataset_description_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 0bdb5617..5001bd18 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,7 +1,7 @@ """API endpoints for dataset funder""" from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -36,7 +36,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset funder""" dataset_ = model.Dataset.query.get(dataset_id) dataset_funder_ = dataset_.dataset_funder - return [d.to_dict() for d in dataset_funder_] + return [d.to_dict() for d in dataset_funder_], 200 @api.doc("update funder") @api.response(200, "Success") @@ -96,7 +96,7 @@ def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argum model.db.session.add(dataset_funder_) list_of_elements.append(dataset_funder_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//dataset//metadata/funder/") @@ -121,4 +121,4 @@ def delete( model.db.session.delete(dataset_funder_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 25417dc0..633d4ade 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -93,7 +93,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset publisher metadata""" dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other - return dataset_other_.to_dict() + return dataset_other_.to_dict(), 200 @api.doc("update 
publisher") @api.response(200, "Success") @@ -131,4 +131,4 @@ def put(self, study_id: int, dataset_id: int): dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_other.update(data) model.db.session.commit() - return dataset_.dataset_other.to_dict() + return dataset_.dataset_other.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index d738082e..8353463a 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -30,7 +30,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume dataset_ = model.Dataset.query.get(dataset_id) dataset_record_keys_ = dataset_.dataset_record_keys - return dataset_record_keys_.to_dict() + return dataset_record_keys_.to_dict(), 200 @api.doc("update record keys") @api.response(200, "Success") @@ -66,4 +66,4 @@ def put(self, study_id: int, dataset_id: int): dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_record_keys.update(data) model.db.session.commit() - return dataset_.dataset_record_keys.to_dict(), 201 + return dataset_.dataset_record_keys.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index da3defdd..3f889378 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,7 +1,7 @@ """API for dataset related item""" from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -31,7 +31,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset related item""" dataset_ = model.Dataset.query.get(dataset_id) dataset_related_item_ = dataset_.dataset_related_item - return [d.to_dict() for d in dataset_related_item_] + return [d.to_dict() for d in dataset_related_item_], 200 
@api.doc("update related item") @api.response(200, "Success") @@ -327,7 +327,7 @@ def delete( model.db.session.delete(dataset_related_item_) model.db.session.commit() - return 204 + return Response(status=204) @api.route( @@ -357,7 +357,7 @@ def delete( model.db.session.delete(dataset_contributors_) model.db.session.commit() - return 204 + return Response(status=204) @api.route( @@ -382,9 +382,14 @@ def delete( if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 dataset_title_ = model.DatasetRelatedItemTitle.query.get(title_id) + if dataset_title_.type == "MainTitle": + return ( + "Main Title type can not be deleted", + 403, + ) model.db.session.delete(dataset_title_) model.db.session.commit() - return 204 + return Response(status=204) @api.route( @@ -413,7 +418,7 @@ def delete( ) model.db.session.delete(dataset_identifier_) model.db.session.commit() - return 204 + return Response(status=204) @api.route( @@ -440,4 +445,4 @@ def delete( dataset_creator_ = model.DatasetRelatedItemContributor.query.get(creator_id) model.db.session.delete(dataset_creator_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index e0b28fa2..194cde6e 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -2,7 +2,7 @@ from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -35,7 +35,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset rights""" dataset_ = model.Dataset.query.get(dataset_id) dataset_rights_ = dataset_.dataset_rights - return [d.to_dict() for d in dataset_rights_] + return [d.to_dict() for d in dataset_rights_], 200 @api.doc("update rights") @api.response(200, 
"Success") @@ -84,7 +84,7 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_rights_) list_of_elements.append(dataset_rights_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//dataset//metadata/rights/") @@ -109,4 +109,4 @@ def delete( model.db.session.delete(dataset_rights_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 1a8f1740..37fbed20 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -2,7 +2,7 @@ from typing import Any, Union -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -36,7 +36,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset subject""" dataset_ = model.Dataset.query.get(dataset_id) dataset_subject_ = dataset_.dataset_subject - return [d.to_dict() for d in dataset_subject_] + return [d.to_dict() for d in dataset_subject_], 200 @api.doc("update subject") @api.response(200, "Success") @@ -92,7 +92,7 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_subject_) list_of_elements.append(dataset_subject_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//dataset//metadata/subject/") @@ -117,4 +117,4 @@ def delete( model.db.session.delete(dataset_subject_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 23793e7e..1b1ed313 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,7 +1,7 @@ """API for dataset title metadata""" from typing import Any, Union -from flask import request +from 
flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -32,7 +32,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset title""" dataset_ = model.Dataset.query.get(dataset_id) dataset_title_ = dataset_.dataset_title - return [d.to_dict() for d in dataset_title_] + return [d.to_dict() for d in dataset_title_], 200 @api.doc("update title") @api.response(200, "Success") @@ -94,10 +94,10 @@ def post(self, study_id: int, dataset_id: int): model.db.session.add(dataset_title_) list_of_elements.append(dataset_title_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//dataset//metadata/title/") - class DatasetDescriptionUpdate(Resource): + class DatasetTitleDelete(Resource): """Dataset Title Update Resource""" @api.doc("delete title") @@ -124,4 +124,4 @@ def delete( ) model.db.session.delete(dataset_title_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study.py b/apis/study.py index 34186e0f..42cc0f4d 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,7 +1,7 @@ """APIs for study operations""" "" from typing import Any, Union -from flask import g, request +from flask import g, request, Response from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate @@ -51,7 +51,7 @@ def get(self): studies = model.Study.query.filter(model.Study.id.in_(study_ids)).all() - return [s.to_dict() for s in studies] + return [s.to_dict() for s in studies], 200 @api.expect(study_model) @api.response(200, "Success") @@ -88,7 +88,7 @@ def post(self): model.db.session.commit() - return study_.to_dict() + return study_.to_dict(), 201 @api.route("/study/") @@ -103,7 +103,7 @@ def get(self, study_id: int): """Return a study's details""" study1 = model.Study.query.get(study_id) - return study1.to_dict() + return study1.to_dict(), 200 
@api.expect(study_model) @api.response(200, "Success") @@ -135,7 +135,7 @@ def put(self, study_id: int): update_study.update(request.json) model.db.session.commit() - return update_study.to_dict() + return update_study.to_dict(), 200 @api.response(200, "Success") @api.response(400, "Validation Error") @@ -160,4 +160,4 @@ def delete(self, study_id: int): model.db.session.delete(study) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index d6b1abfc..833a82c2 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,7 +1,7 @@ """API routes for study arm metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -43,7 +43,7 @@ def get(self, study_id): arm = model.Arm(study_) - return arm.to_dict() + return arm.to_dict(), 200 def post(self, study_id): """Create study arm metadata""" @@ -91,7 +91,7 @@ def post(self, study_id): arms = model.Arm(study_obj) - return arms.to_dict() + return arms.to_dict(), 201 # todo delete @api.route("/study//metadata/arm/") @@ -107,4 +107,4 @@ def delete(self, study_id: int, arm_id: int): model.db.session.delete(study_arm_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 09413578..1ab4d82a 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,7 +1,7 @@ """API routes for study available ipd metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -40,7 +40,7 @@ def get(self, study_id: int): study_available_ipd_, key=lambda x: x.created_at ) - return [s.to_dict() for s in 
sorted_study_available_ipd] + return [s.to_dict() for s in sorted_study_available_ipd], 200 @api.doc( description="An array of objects are expected within the payload with the keys demonstrated below to create an available-ipd" # noqa E501 @@ -103,7 +103,7 @@ def post(self, study_id: int): list_of_elements.append(study_available_ipd_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//metadata/available-ipd/") @@ -120,4 +120,4 @@ def delete(self, study_id: int, available_ipd_id: int): model.db.session.delete(study_available_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index ef4def18..1f385953 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -2,7 +2,7 @@ import typing from email_validator import EmailNotValidError, validate_email -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import FormatChecker, ValidationError, validate @@ -42,7 +42,7 @@ def get(self, study_id: int): sorted_study_contact = sorted(study_contact_, key=lambda x: x.created_at) - return [s.to_dict() for s in sorted_study_contact if s.central_contact] + return [s.to_dict() for s in sorted_study_contact if s.central_contact], 200 def post(self, study_id: int): """Create study contact metadata""" @@ -119,7 +119,7 @@ def validate_is_valid_email(instance): model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//metadata/central-contact/") class StudyContactUpdate(Resource): @@ -135,4 +135,4 @@ def delete(self, study_id: int, central_contact_id: int): model.db.session.delete(study_contact_) model.db.session.commit() - return study_contact_.to_dict() + return Response(status=204) diff --git a/apis/study_metadata/study_description.py 
b/apis/study_metadata/study_description.py index bc7624a0..af6baff6 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -32,7 +32,7 @@ def get(self, study_id: int): study_description_ = study_.study_description - return study_description_.to_dict() + return study_description_.to_dict(), 200 def put(self, study_id: int): """Update study description metadata""" @@ -64,4 +64,4 @@ def put(self, study_id: int): model.db.session.commit() - return study_.study_description.to_dict() + return study_.study_description.to_dict(), 200 diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index ebadf2e6..3794e982 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -49,7 +49,7 @@ def get(self, study_id: int): study_design_ = study_.study_design - return study_design_.to_dict() + return study_design_.to_dict(), 200 def put(self, study_id: int): """Update study design metadata""" @@ -219,4 +219,4 @@ def put(self, study_id: int): model.db.session.commit() - return study_.study_design.to_dict() + return study_.study_design.to_dict(), 200 diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index f8f3c17e..536c7089 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -42,7 +42,7 @@ def get(self, study_id: int): """Get study eligibility metadata""" study_ = model.Study.query.get(study_id) - return study_.study_eligibility.to_dict() + return study_.study_eligibility.to_dict(), 200 def put(self, study_id: int): """Update study eligibility metadata""" @@ -93,12 +93,4 @@ def put(self, study_id: int): model.db.session.commit() - return study_.study_eligibility.to_dict() - - # def post(self, study_id: int): - # data = request.json - # study_eligibility_ = Study.query.get(study_id) - # study_eligibility_ = StudyEligibility.from_data(study_eligibility_, data) - # 
db.session.add(study_eligibility_) - # db.session.commit() - # return study_eligibility_.to_dict() + return study_.study_eligibility.to_dict(), 200 diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 85236bc5..da7698f2 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,7 +1,7 @@ """API routes for study identification metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -36,7 +36,7 @@ def get(self, study_id: int): """Get study identification metadata""" study_ = model.Study.query.get(study_id) identifiers = model.Identifiers(study_) - return identifiers.to_dict() + return identifiers.to_dict(), 200 @api.doc("identification add") @api.response(200, "Success") @@ -110,7 +110,7 @@ def post(self, study_id: int): final_identifiers = model.Identifiers(study_obj) - return final_identifiers.to_dict() + return final_identifiers.to_dict(), 201 @api.route("/study//metadata/identification/") class StudyIdentificationdUpdate(Resource): @@ -131,4 +131,4 @@ def delete(self, study_id: int, identification_id: int): model.db.session.delete(study_identification_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 61a5573a..bc43bd51 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -1,7 +1,7 @@ """API routes for study intervention metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -42,7 +42,7 @@ def get(self, study_id: int): study_intervention_, key=lambda x: x.created_at ) - return [s.to_dict() for s in sorted_study_intervention] + 
return [s.to_dict() for s in sorted_study_intervention], 200 def post(self, study_id: int): """Create study intervention metadata""" @@ -110,7 +110,7 @@ def post(self, study_id: int): list_of_elements.append(study_intervention_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//metadata/intervention/") class StudyInterventionUpdate(Resource): @@ -127,4 +127,4 @@ def delete(self, study_id: int, intervention_id: int): model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index c04d6fae..c9948b6f 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -37,7 +37,7 @@ def get(self, study_id: int): """Get study ipdsharing metadata""" study_ = model.Study.query.get(study_id) - return study_.study_ipdsharing.to_dict() + return study_.study_ipdsharing.to_dict(), 200 def put(self, study_id: int): """Create study ipdsharing metadata""" @@ -100,4 +100,4 @@ def put(self, study_id: int): return "Access denied, you can not delete study", 403 study_.study_ipdsharing.update(request.json) model.db.session.commit() - return study_.study_ipdsharing.to_dict() + return study_.study_ipdsharing.to_dict(), 200 diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index 6f97b112..ad1b2a2c 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -1,7 +1,7 @@ """API routes for study link metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -34,7 +34,7 @@ def get(self, study_id: int): study_ = model.Study.query.get(study_id) study_link_ = study_.study_link sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at) - return [s.to_dict() for s in sorted_study_link_] + return 
[s.to_dict() for s in sorted_study_link_], 200 def post(self, study_id: int): """Create study link metadata""" @@ -78,7 +78,7 @@ def post(self, study_id: int): list_of_elements.append(study_link_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//metadata/link/") class StudyLinkUpdate(Resource): @@ -95,4 +95,4 @@ def delete(self, study_id: int, link_id: int): model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 09ae5cc1..27d1c2f1 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -1,7 +1,7 @@ """API routes for study location metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -41,7 +41,7 @@ def get(self, study_id: int): sorted_study_location = sorted(study_location_, key=lambda x: x.created_at) - return [s.to_dict() for s in sorted_study_location] + return [s.to_dict() for s in sorted_study_location], 200 def post(self, study_id: int): """Create study location metadata""" @@ -96,7 +96,7 @@ def post(self, study_id: int): list_of_elements.append(study_location_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//metadata/location/") @@ -114,4 +114,4 @@ def delete(self, study_id: int, location_id: int): model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index fd04de4a..bcf359fc 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -37,7 +37,7 @@ def get(self, study_id: int): study_other_ = study_.study_other - return study_other_.to_dict() + return study_other_.to_dict(), 200 def put(self, study_id: int): """Update study 
other metadata""" @@ -75,7 +75,7 @@ def put(self, study_id: int): model.db.session.commit() - return study_.study_other.to_dict() + return study_.study_other.to_dict(), 200 @api.route("/study//metadata/oversight") @@ -91,7 +91,7 @@ def get(self, study_id: int): study_ = model.Study.query.get(study_id) study_oversight_has_dmc = study_.study_other.oversight_has_dmc - return {"oversight": study_oversight_has_dmc} + return {"oversight": study_oversight_has_dmc}, 200 def put(self, study_id: int): """Update study oversight metadata""" @@ -118,7 +118,7 @@ def put(self, study_id: int): study_obj.touch() model.db.session.commit() - return study_oversight + return study_oversight, 200 # todo: rename class @@ -136,7 +136,7 @@ def get(self, study_id: int): study_other_conditions = study_.study_other.conditions - return study_other_conditions + return study_other_conditions, 200 def put(self, study_id: int): """Update study conditions metadata""" @@ -162,4 +162,4 @@ def put(self, study_id: int): study_obj.touch() model.db.session.commit() - return study_obj.study_other.conditions + return study_obj.study_other.conditions, 200 diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index def4a2da..c0d50c6f 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,7 +1,7 @@ """API routes for study overall official metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -42,7 +42,7 @@ def get(self, study_id: int): study_overall_official_, key=lambda x: x.created_at ) - return [i.to_dict() for i in sorted_study_overall] + return [i.to_dict() for i in sorted_study_overall], 200 @api.response(200, "Success") @api.response(400, "Validation Error") @@ -94,7 +94,7 @@ def post(self, study_id: int): 
list_of_elements.append(study_overall_official_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//metadata/overall-official/") class StudyOverallOfficialUpdate(Resource): @@ -111,4 +111,4 @@ def delete(self, study_id: int, overall_official_id: int): model.db.session.delete(study_overall_official_) model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 6e01ea41..d084b724 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,7 +1,7 @@ """API routes for study reference metadata""" import typing -from flask import request +from flask import request, Response from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -39,7 +39,7 @@ def get(self, study_id: int): sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at) - return [s.to_dict() for s in sorted_study_reference] + return [s.to_dict() for s in sorted_study_reference], 200 def post(self, study_id: int): """Create study reference metadata""" @@ -80,7 +80,7 @@ def post(self, study_id: int): list_of_elements.append(study_reference_.to_dict()) model.db.session.commit() - return list_of_elements + return list_of_elements, 201 @api.route("/study//metadata/reference/") class StudyReferenceUpdate(Resource): @@ -97,4 +97,4 @@ def delete(self, study_id: int, reference_id: int): model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 73009e83..8a040c4d 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -45,7 +45,7 @@ def get(self, study_id: int): study_sponsors_collaborators_ = study_.study_sponsors_collaborators - return 
study_sponsors_collaborators_.to_dict() + return study_sponsors_collaborators_.to_dict(), 200 def put(self, study_id: int): """Update study sponsors metadata""" @@ -121,7 +121,7 @@ def put(self, study_id: int): model.db.session.commit() - return study_.study_sponsors_collaborators.to_dict() + return study_.study_sponsors_collaborators.to_dict(), 200 @api.route("/study//metadata/collaborators") @@ -138,7 +138,7 @@ def get(self, study_id: int): study_collaborators_ = study_.study_sponsors_collaborators.collaborator_name - return study_collaborators_ + return study_collaborators_, 200 @api.response(200, "Success") @api.response(400, "Validation Error") @@ -162,4 +162,4 @@ def put(self, study_id: int): study_obj.study_sponsors_collaborators.collaborator_name = data study_obj.touch() model.db.session.commit() - return study_obj.study_sponsors_collaborators.collaborator_name + return study_obj.study_sponsors_collaborators.collaborator_name, 200 diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 941220a2..cd4cef42 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -39,7 +39,7 @@ def get(self, study_id: int): study_status_ = study_.study_status - return study_status_.to_dict() + return study_status_.to_dict(), 200 def put(self, study_id: int): """Update study status metadata""" @@ -107,4 +107,4 @@ def put(self, study_id: int): model.db.session.commit() - return study.study_status.to_dict() + return study.study_status.to_dict(), 200 diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py index 2e14d02a..943eae5d 100644 --- a/model/dataset_metadata/dataset_contributor.py +++ b/model/dataset_metadata/dataset_contributor.py @@ -44,7 +44,7 @@ def to_dict_metadata(self): return { "id": self.id, "name": self.name, - "name_type": self.name_identifier, + "name_type": self.name_type, "contributor_type": self.contributor_type, "creator": 
self.creator, } diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 0225972b..b1846743 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -54,7 +54,6 @@ def to_dict_metadata(self): def to_dict_publisher(self): return { "managing_organization_name": self.managing_organization_name, - "managing_organization_ror_id": self.managing_organization_ror_id, "publisher": self.publisher, } diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 81019b09..781c8cfc 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -71,11 +71,8 @@ def to_dict_metadata(self): """Converts the study metadata to a dictionary""" return { "gender": self.gender, - "gender_based": self.gender_based, "minimum_age_value": self.minimum_age_value, - "maximum_age_value": self.maximum_age_value, - "inclusion_criteria": self.inclusion_criteria, - "exclusion_criteria": self.exclusion_criteria, + "gender_based": self.gender_based, } @staticmethod diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 27ac6476..5e4d55df 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -49,7 +49,6 @@ def to_dict_metadata(self): return { "id": self.id, "facility": self.facility, - "city": self.city, "country": self.country, } diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 069f3099..08c1f84e 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -42,8 +42,8 @@ def to_dict_metadata(self): """Converts the study metadata to a dictionary""" return { "name": self.name, - "role": self.role, "affiliation": self.affiliation, + "role": self.role, } @staticmethod diff --git 
a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py index f82dc725..d4bb84db 100644 --- a/model/study_metadata/study_sponsors_collaborators.py +++ b/model/study_metadata/study_sponsors_collaborators.py @@ -50,8 +50,8 @@ def to_dict_metadata(self): return { "responsible_party_type": self.responsible_party_type, "responsible_party_investigator_name": self.responsible_party_investigator_name, + # "collaborator_name": self.collaborator_name, "lead_sponsor_name": self.lead_sponsor_name, - "collaborator_name": self.collaborator_name, } @staticmethod diff --git a/tests/conftest.py b/tests/conftest.py index ceb29982..56fa4d12 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,7 +18,6 @@ # Set global variable for study ID # Study variables use for testing pytest.global_study_id = {} -pytest.global_version_id = "" pytest.global_arm_id = "" pytest.global_available_ipd_id = "" pytest.global_cc_id = "" @@ -31,7 +30,7 @@ # Dataset variables use for testing pytest.global_dataset_id = "" -pytest.global_dataset_version_id = "" + pytest.global_alternative_identifier_id = "" pytest.global_dataset_contributor_id = "" pytest.global_dataset_creator_id = "" @@ -47,6 +46,8 @@ pytest.global_dataset_subject_id = "" pytest.global_dataset_title_id = "" +pytest.global_dataset_version_id = "" + # Create the flask app for testing @pytest.fixture(scope="session") diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index da912975..6cf6e37e 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -19,7 +19,7 @@ def test_post_study(_logged_in_client): }, ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) assert response_data["title"] == "Study Title" @@ -99,11 +99,15 @@ def test_delete_studies_created(_logged_in_client): }, ) - assert response.status_code == 200 + assert response.status_code == 201 
response_data = json.loads(response.data) study_id = response_data["id"] # delete study response = _logged_in_client.delete(f"/study/{study_id}") - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get("/study") + assert response_get.status_code == 200 + assert len(json.loads(response_get.data)) == 1 diff --git a/tests/functional/test_study_dataset_api.py b/tests/functional/test_study_dataset_api.py index 8471496d..c6985a52 100644 --- a/tests/functional/test_study_dataset_api.py +++ b/tests/functional/test_study_dataset_api.py @@ -35,7 +35,7 @@ def test_post_dataset(_logged_in_client): }, ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_id = response_data["id"] @@ -72,7 +72,7 @@ def test_delete_dataset_from_study(_logged_in_client): }, ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) dataset_id = response_data["id"] @@ -81,7 +81,10 @@ def test_delete_dataset_from_study(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}", ) - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get(f"/study/{study_id}/dataset") + assert response_get.status_code == 200 + assert len(json.loads(response_get.data)) == 1 def test_post_dataset_version(_logged_in_client): @@ -104,7 +107,7 @@ def test_post_dataset_version(_logged_in_client): }, ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_version_id = response_data["id"] diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 13099053..e0a84199 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -87,7 +87,7 @@ def 
test_post_alternative_identifier(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_alternative_identifier_id = response_data[0]["id"] @@ -110,7 +110,13 @@ def test_delete_alternative_identifier(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + assert response_get.status_code == 200 + + assert len(json.loads(response_get.data)) == 0 # ------------------- CONSENT METADATA ------------------- # @@ -197,7 +203,7 @@ def test_post_dataset_contributor_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_contributor_id = response_data[0]["id"] @@ -246,7 +252,14 @@ def test_delete_dataset_contributor_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" + ) + assert response_get.status_code == 200 + + assert len(json.loads(response_get.data)) == 0 # ------------------- CREATOR METADATA ------------------- # @@ -296,7 +309,7 @@ def test_post_dataset_creator_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_creator_id = response_data[0]["id"] @@ -327,7 +340,14 @@ def test_delete_dataset_creator_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" ) - assert response.status_code == 200 + assert 
response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" + ) + assert response_get.status_code == 200 + + assert len(json.loads(response_get.data)) == 0 # ------------------- DATE METADATA ------------------- # @@ -363,7 +383,7 @@ def test_post_dataset_date_metadata(_logged_in_client): json=[{"date": 20210101, "type": "Type", "information": "Info"}], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_date_id = response_data[0]["id"] @@ -387,7 +407,14 @@ def test_delete_dataset_date_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date" + ) + assert response_get.status_code == 200 + + assert len(json.loads(response_get.data)) == 0 # ------------------- DE-IDENTIFICATION LEVEL METADATA ------------------- # @@ -480,7 +507,7 @@ def test_post_dataset_description_metadata(_logged_in_client): json=[{"description": "Description", "type": "Methods"}], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_description_id = response_data[0]["id"] @@ -504,7 +531,15 @@ def test_delete_dataset_description_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + ) + assert ( + len(json.loads(response_get.data)) == 1 + and json.loads(response_get.data)[0]["type"] == "Abstract" + ) # ------------------- FUNDER METADATA ------------------- # @@ -552,7 +587,7 @@ def 
test_post_dataset_funder_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_funder_id = response_data[0]["id"] @@ -581,7 +616,13 @@ def test_delete_dataset_funder_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + ) + assert response_get.status_code == 200 + assert len(json.loads(response_get.data)) == 0 # ------------------- OTHER METADATA ------------------- # @@ -720,9 +761,8 @@ def test_put_dataset_record_keys_metadata(_logged_in_client): json={"type": "Record Type", "details": "Details for Record Keys"}, ) - assert response.status_code == 201 + assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["type"] == "Record Type" assert response_data["details"] == "Details for Record Keys" @@ -787,7 +827,10 @@ def test_post_dataset_related_item_metadata(_logged_in_client): "publication_year": 2013, "publisher": "Publisher", "relation_type": "Relation Type", - "titles": [{"title": "Title", "type": "MainTitle"}], + "titles": [ + {"title": "Title", "type": "MainTitle"}, + {"title": "Title", "type": "Subtitle"}, + ], "type": "Type", "volume": "Volume", } @@ -829,6 +872,8 @@ def test_post_dataset_related_item_metadata(_logged_in_client): assert response_data[0]["relation_type"] == "Relation Type" assert response_data[0]["titles"][0]["title"] == "Title" assert response_data[0]["titles"][0]["type"] == "MainTitle" + assert response_data[0]["titles"][1]["title"] == "Title" + assert response_data[0]["titles"][1]["type"] == "Subtitle" assert response_data[0]["type"] == "Type" assert response_data[0]["volume"] == "Volume" @@ -851,7 +896,13 @@ def 
test_delete_dataset_related_item_contributor_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{contributor_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + assert response_get.status_code == 200 + assert len(json.loads(response_get.data)[0]["contributors"]) == 0 def test_delete_dataset_related_item_creator_metadata(_logged_in_client): @@ -872,7 +923,13 @@ def test_delete_dataset_related_item_creator_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{creator_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + + assert len(json.loads(response_get.data)[0]["creators"]) == 0 def test_delete_dataset_related_item_identifier_metadata(_logged_in_client): @@ -893,7 +950,14 @@ def test_delete_dataset_related_item_identifier_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{identifier_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + + assert response_get.status_code == 200 + assert len(json.loads(response_get.data)[0]["identifiers"]) == 0 def test_delete_dataset_related_item_title_metadata(_logged_in_client): @@ -907,14 +971,35 @@ def test_delete_dataset_related_item_title_metadata(_logged_in_client): study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id related_item_id = pytest.global_dataset_related_item_id - title_id = pytest.global_dataset_related_item_title_id - + # title_id = 
pytest.global_dataset_related_item_title_id # pylint: disable=line-too-long - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{title_id}" + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" ) - assert response.status_code == 200 + # titles_to_delete = [ + # # f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{title_id}" + # for i in json.loads(response_get.data)[0]["titles"] + # if i["type"] != "MainTitle" + # ] + + for i in json.loads(response_get.data)[0]["titles"]: + if i["type"] != "MainTitle": + t_id = i["id"] + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{t_id}" + ) + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + + assert response_get.status_code == 200 + assert ( + len(json.loads(response_get.data)[0]["titles"]) == 1 + and json.loads(response_get.data)[0]["titles"][0]["type"] == "MainTitle" + ) def test_delete_dataset_related_item_metadata(_logged_in_client): @@ -933,7 +1018,13 @@ def test_delete_dataset_related_item_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + + assert response_get.status_code == 200 + assert len(json.loads(response_get.data)) == 0 # ------------------- RIGHTS METADATA ------------------- # @@ -978,7 +1069,7 @@ def test_post_dataset_rights_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_rights_id = response_data[0]["id"] 
@@ -1004,7 +1095,14 @@ def test_delete_dataset_rights_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" + ) + assert response_get.status_code == 200 + + assert len(json.loads(response_get.data)) == 0 # ------------------- SUBJECTS METADATA ------------------- # @@ -1050,7 +1148,7 @@ def test_post_dataset_subjects_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_subject_id = response_data[0]["id"] @@ -1076,7 +1174,14 @@ def test_delete_dataset_subject_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject" + ) + assert response_get.status_code == 200 + + assert len(json.loads(response_get.data)) == 0 # ------------------- TITLE METADATA ------------------- # @@ -1114,7 +1219,7 @@ def test_post_dataset_title_metadata(_logged_in_client): json=[{"title": "Title", "type": "Subtitle"}], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_title_id = response_data[0]["id"] @@ -1138,4 +1243,13 @@ def test_delete_dataset_title_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + ) + assert response_get.status_code == 200 + assert ( + len(json.loads(response_get.data)) == 1 + and 
json.loads(response_get.data)[0]["type"] == "MainTitle" + ) diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index 08296ca3..77f71677 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -26,7 +26,7 @@ def test_post_arm_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_arm_id = response_data["arms"][0]["id"] @@ -69,10 +69,11 @@ def test_delete_arm_metadata(_logged_in_client): """ study_id = pytest.global_study_id["id"] # type: ignore arm_id = pytest.global_arm_id - response = _logged_in_client.delete(f"/study/{study_id}/metadata/arm/{arm_id}") - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/arm") + assert len(json.loads(response_get.data)["arms"]) == 0 # ------------------- IPD METADATA ------------------- # @@ -96,7 +97,7 @@ def test_post_available_ipd_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_available_ipd_id = response_data[0]["id"] @@ -132,7 +133,10 @@ def test_delete_available_ipd_metadata(_logged_in_client): f"/study/{study_id}/metadata/available-ipd/{available_ipd_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/available-ipd") + + assert len(json.loads(response_get.data)) == 0 # ------------------- CENTRAL CONTACT METADATA ------------------- # @@ -158,7 +162,7 @@ def test_post_cc_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_cc_id = response_data[0]["id"] @@ -208,7 +212,10 @@ def 
test_delete_cc_metadata(_logged_in_client): f"/study/{study_id}/metadata/central-contact/{central_contact_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/central-contact") + + assert len(json.loads(response_get.data)) == 0 # ------------------- COLLABORATORS METADATA ------------------- # @@ -496,7 +503,7 @@ def test_post_identification_metadata(_logged_in_client): }, ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_identification_id = response_data["secondary"][0]["id"] @@ -522,8 +529,11 @@ def test_delete_identification_metadata(_logged_in_client): response = _logged_in_client.delete( f"/study/{study_id}/metadata/identification/{identification_id}" ) + assert response.status_code == 204 - assert response.status_code == 200 + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/identification") + # print(response_get.data) + assert len(json.loads(response_get.data)["secondary"]) == 0 # ------------------- INTERVENTION METADATA ------------------- # @@ -561,7 +571,7 @@ def test_post_intervention_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_intervention_id = response_data[0]["id"] @@ -572,6 +582,24 @@ def test_post_intervention_metadata(_logged_in_client): assert response_data[0]["other_name_list"] == ["uhh", "yes"] +def test_delete_intervention_metadata(_logged_in_client): + """ + Given a Flask application configured for testing and a study ID and link ID + WHEN the '/study/{study_id}/metadata/intervention/{intervention_id}' endpoint is requested (DELETE) + THEN check that the response is valid and deletes the link metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + intervention_id = pytest.global_intervention_id + + response = 
_logged_in_client.delete( + f"/study/{study_id}/metadata/intervention/{intervention_id}" + ) + assert response.status_code == 204 + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/intervention") + + assert len(json.loads(response_get.data)) == 0 + + # ------------------- IPD SHARING METADATA ------------------- # def test_get_ipdsharing_metadata(_logged_in_client): """ @@ -647,7 +675,7 @@ def test_post_link_metadata(_logged_in_client): json=[{"url": "google.com", "title": "google link"}], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_link_id = response_data[0]["id"] @@ -666,7 +694,11 @@ def test_delete_link_metadata(_logged_in_client): response = _logged_in_client.delete(f"/study/{study_id}/metadata/link/{link_id}") - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/link") + + assert len(json.loads(response_get.data)) == 0 # ------------------- LOCATION METADATA ------------------- # @@ -705,7 +737,7 @@ def test_post_location_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_location_id = response_data[0]["id"] @@ -731,7 +763,11 @@ def test_delete_location_metadata(_logged_in_client): f"/study/{study_id}/metadata/location/{location_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/location") + + assert len(json.loads(response_get.data)) == 0 # ------------------- OTHER METADATA ------------------- # @@ -760,7 +796,7 @@ def test_put_other_metadata(_logged_in_client): f"/study/{study_id}/metadata/other", json={ "oversight_has_dmc": False, - "conditions": ["true", "conditions", "keywords", "1"], + "conditions": ["c"], "keywords": ["true", "u"], "size": 103, }, @@ 
-770,7 +806,7 @@ def test_put_other_metadata(_logged_in_client): response_data = json.loads(response.data) assert response_data["oversight_has_dmc"] is False - assert response_data["conditions"] == ["true", "conditions", "keywords", "1"] + assert response_data["conditions"] == ["c"] assert response_data["keywords"] == ["true", "u"] assert response_data["size"] == 103 @@ -802,7 +838,7 @@ def test_post_overall_official_metadata(_logged_in_client): json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_overall_official_id = response_data[0]["id"] @@ -826,7 +862,10 @@ def test_delete_overall_official_metadata(_logged_in_client): f"/study/{study_id}/metadata/overall-official/{overall_official_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/overall-official") + + assert len(json.loads(response_get.data)) == 0 # ------------------- OVERSIGHT METADATA ------------------- # @@ -894,7 +933,7 @@ def test_post_reference_metadata(_logged_in_client): ], ) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_reference_id = response_data[0]["id"] @@ -918,7 +957,10 @@ def test_delete_reference_metadata(_logged_in_client): f"/study/{study_id}/metadata/reference/{reference_id}" ) - assert response.status_code == 200 + assert response.status_code == 204 + response_get = _logged_in_client.get(f"/study/{study_id}/metadata/reference") + + assert len(json.loads(response_get.data)) == 0 # ------------------- SPONSORS METADATA ------------------- # @@ -994,9 +1036,9 @@ def test_put_status_metadata(_logged_in_client): json={ "overall_status": "Withdrawn", "why_stopped": "test", - "start_date": "fff", + "start_date": "2023-11-15 00:00:00", "start_date_type": "Actual", - 
"completion_date": "nuzzzll", + "completion_date": "2023-11-16 00:00:00", "completion_date_type": "Actual", }, ) @@ -1006,7 +1048,7 @@ def test_put_status_metadata(_logged_in_client): assert response_data["overall_status"] == "Withdrawn" assert response_data["why_stopped"] == "test" - assert response_data["start_date"] == "fff" + assert response_data["start_date"] == "2023-11-15 00:00:00" assert response_data["start_date_type"] == "Actual" - assert response_data["completion_date"] == "nuzzzll" + assert response_data["completion_date"] == "2023-11-16 00:00:00" assert response_data["completion_date_type"] == "Actual" diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py new file mode 100644 index 00000000..89d50e78 --- /dev/null +++ b/tests/functional/test_study_version_api.py @@ -0,0 +1,466 @@ +# pylint: disable=too-many-lines +"""Tests for the Study Metadata API endpoints""" +import json + +import pytest + +# ------------------- VERSION ADD ------------------- # + + +def test_get_version_study_metadata(_logged_in_client): + """ + Given a Flask application configured for testing + WHEN the /study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata endpoint is requested (GET) + THEN check that the response is valid and retrieves the design metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id # type: ignore + version_id = pytest.global_dataset_version_id # type: ignore + + _logged_in_client.post( + f"/study/{study_id}/metadata/arm", + json=[ + { + "label": "Label1", + "type": "Experimental", + "description": "Arm Description", + "intervention_list": ["intervention1", "intervention2"], + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/metadata/available-ipd", + json=[ + { + "identifier": "identifier1", + "type": "Clinical Study Report", + "url": "google.com", + "comment": "comment1", + } + ], + ) + _logged_in_client.post( + 
f"/study/{study_id}/metadata/central-contact", + json=[ + { + "name": "central-contact", + "affiliation": "affiliation", + "role": "role", + "phone": "808", + "phone_ext": "909", + "email_address": "sample@gmail.com", + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/metadata/location", + json=[ + { + "facility": "test", + "status": "Withdrawn", + "city": "city", + "state": "ca", + "zip": "test", + "country": "yes", + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/metadata/identification", + json={ + "primary": { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + ) + _logged_in_client.post( + f"/study/{study_id}/metadata/intervention", + json=[ + { + "type": "Device", + "name": "name test", + "description": "desc", + "arm_group_label_list": ["test", "one"], + "other_name_list": ["uhh", "yes"], + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/metadata/link", + json=[{"url": "google.com", "title": "google link"}], + ) + _logged_in_client.post( + f"/study/{study_id}/metadata/overall-official", + json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], + ) + _logged_in_client.post( + f"/study/{study_id}/metadata/reference", + json=[ + { + "identifier": "reference identifier", + "type": "Yes", + "citation": "reference citation", + } + ], + ) + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" + ) + response_data = json.loads(response.data) + # print(response_data) + assert response.status_code == 200 + assert response_data["available_ipd"][0]["identifier"] == "identifier1" + assert response_data["available_ipd"][0]["url"] == "google.com" + assert response_data["arms"][0]["label"] == "Label1" + + assert 
response_data["contacts"][0]["name"] == "central-contact" + assert response_data["contacts"][0]["affiliation"] == "affiliation" + + assert response_data["secondary_identifiers"][0]["identifier"] == "test" + assert response_data["secondary_identifiers"][0]["identifier_type"] == "test" + assert response_data["interventions"][0]["type"] == "Device" + assert response_data["interventions"][0]["name"] == "name test" + assert response_data["links"][0]["title"] == "google link" + assert response_data["links"][0]["url"] == "google.com" + assert response_data["locations"][0]["country"] == "yes" + assert response_data["locations"][0]["facility"] == "test" + assert response_data["overall_officials"][0]["name"] == "test" + assert response_data["overall_officials"][0]["role"] == "Study Chair" + assert response_data["overall_officials"][0]["affiliation"] == "aff" + assert response_data["references"][0]["identifier"] == "reference identifier" + assert response_data["references"][0]["citation"] == "reference citation" + + assert response_data["description"]["brief_summary"] == "brief_summary" + assert response_data["design"]["design_allocation"] == "dfasdfasd" + + assert response_data["design"]["study_type"] == "Interventional" + assert response_data["design"]["design_intervention_model"] == "Treatment" + assert response_data["design"]["design_primary_purpose"] == "Parallel Assignment" + assert response_data["design"]["design_masking"] == "Double" + assert response_data["design"]["design_masking_description"] == "tewsfdasf" + assert response_data["design"]["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert response_data["design"]["phase_list"] == ["N/A"] + assert response_data["design"]["enrollment_count"] == 3 + assert response_data["design"]["enrollment_type"] == "Actual" + assert response_data["design"]["number_arms"] == 2 + assert response_data["design"]["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert 
response_data["design"]["design_time_perspective_list"] == ["Other"] + assert response_data["design"]["bio_spec_retention"] == "None Retained" + assert response_data["design"]["target_duration"] == "rewrwe" + assert response_data["design"]["number_groups_cohorts"] == 1 + assert response_data["eligibility"]["gender"] == "All" + assert response_data["eligibility"]["gender_based"] == "Yes" + assert response_data["eligibility"]["minimum_age_value"] == 18 + assert response_data["primary_identifier"]["identifier"] == "test" + assert response_data["primary_identifier"]["identifier_type"] == "test" + assert response_data["status"]["overall_status"] == "Withdrawn" + assert response_data["status"]["start_date"] == "2023-11-15 00:00:00" + assert ( + response_data["sponsors"]["responsible_party_investigator_name"] == "party name" + ) + assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert response_data["sponsors"]["lead_sponsor_name"] == "sponsor name" + assert response_data["collaborators"] == ["collaborator1123"] + assert response_data["conditions"] == ["c"] + + assert response_data["ipd_sharing"]["ipd_sharing"] == "Yes" + assert response_data["ipd_sharing"]["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + + assert response_data["oversight"] is True + + +def test_get_version_dataset_metadata(_logged_in_client): + """ + Given a Flask application configured for testing + WHEN the '/study//dataset//version//dataset-metadata' endpoint is requested (GET) + THEN check that the response is valid and retrieves the design metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id # type: ignore + version_id = pytest.global_dataset_version_id # type: ignore + + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", + json=[ + { + "name": "Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + 
"name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", + json=[ + { + "name": "Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date", + json=[{"date": 20210101, "type": "Type", "information": "Info"}], + ) + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + json=[ + { + "name": "Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", + json=[ + { + "identifier": "Identifier", + "identifier_scheme": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", + json=[ + { + "classification_code": "Classification Code", + "scheme": "Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Value URI", + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "identifier test", + "type": "ARK", + } + ], + ) + _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + json=[ + { + "contributors": [ + 
{ + "name": "Ndafsdame", + "contributor_type": "Con Type", + "name_type": "Personal", + } + ], + "creators": [{"name": "Name", "name_type": "Personal"}], + "edition": "Edition", + "first_page": "First Page", + "identifiers": [ + { + "identifier": "Identifier", + "metadata_scheme": "Metadata Scheme", + "scheme_type": "Scheme Type", + "scheme_uri": "Scheme URI", + "type": "ARK", + } + ], + "issue": "Issue", + "last_page": "Last Page", + "number_type": "Number Type", + "number_value": "Number Value", + "publication_year": 2013, + "publisher": "Publisher", + "relation_type": "Relation Type", + "titles": [ + {"title": "Title", "type": "MainTitle"}, + {"title": "Title", "type": "Subtitle"}, + ], + "type": "Type", + "volume": "Volume", + } + ], + ) + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/dataset-metadata" + ) + response_data = json.loads(response.data) + # print(response_data) + + assert response.status_code == 200 + + assert response_data["contributors"][0]["name"] == "Name here" + assert response_data["contributors"][0]["name_type"] == "Personal" + assert response_data["contributors"][0]["contributor_type"] == "Con Type" + assert response_data["dates"][0]["date"] == "01-01-1970" + assert response_data["dates"][0]["type"] == "Type" + assert response_data["creators"][0]["name"] == "Name here" + assert response_data["creators"][0]["name_type"] == "Personal" + assert response_data["funders"][0]["name"] == "Name" + assert response_data["funders"][0]["identifier"] == "Identifier" + assert response_data["rights"][0]["identifier"] == "Identifier" + assert response_data["rights"][0]["rights"] == "Rights" + assert response_data["subjects"][0]["subject"] == "Subject" + assert response_data["about"]["language"] == "English" + + assert response_data["about"]["resource_type"] == "Resource Type" + assert response_data["about"]["size"] == ["Size"] + assert response_data["access"]["type"] == "type" + assert 
response_data["access"]["description"] == "description" + assert response_data["consent"]["noncommercial"] is True + assert response_data["consent"]["geog_restrict"] is True + assert response_data["consent"]["research_type"] is True + assert response_data["de_identification"]["direct"] is True + assert response_data["de_identification"]["type"] == "Level" + assert response_data["publisher"]["publisher"] == "Publisher" + assert ( + response_data["publisher"]["managing_organization_name"] + == "Managing Organization Name" + ) + + assert response_data["identifiers"][0]["identifier"] == "identifier test" + assert response_data["identifiers"][0]["type"] == "ARK" + assert response_data["related_items"][0]["publication_year"] == "1970" + assert response_data["related_items"][0]["publisher"] == "Publisher" + assert response_data["related_items"][0]["contributors"][0]["name"] == "Ndafsdame" + assert ( + response_data["related_items"][0]["contributors"][0]["contributor_type"] + == "Con Type" + ) + assert response_data["related_items"][0]["creators"][0]["name"] == "Name" + assert response_data["related_items"][0]["creators"][0]["name_type"] == "Personal" + assert response_data["related_items"][0]["titles"][0]["title"] == "Title" + assert response_data["related_items"][0]["titles"][0]["type"] == "MainTitle" + assert response_data["related_items"][0]["titles"][1]["title"] == "Title" + assert response_data["related_items"][0]["titles"][1]["type"] == "Subtitle" + assert ( + response_data["related_items"][0]["identifiers"][0]["identifier"] + == "Identifier" + ) + assert response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" + assert response_data["related_items"][0]["type"] == "Type" + + +def test_get_version_readme(_logged_in_client): + """ + Given a Flask application configured for testing + WHEN the '/study//dataset//version//readme' endpoint is requested (GET) + THEN check that the response is valid and retrieves the design metadata + """ + study_id = 
pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id # type: ignore + version_id = pytest.global_dataset_version_id # type: ignore + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme" + ) + + assert response.status_code == 200 + + +def test_put_version_readme(_logged_in_client): + """ + Given a Flask application configured for testing + WHEN the '/study//dataset//version//readme' endpoint is requested (PUT) + THEN check that the response is valid and retrieves the design metadata + """ + # create a new dataset and delete it afterwards + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id # type: ignore + version_id = pytest.global_dataset_version_id # type: ignore + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme", + json={"readme": "readme test"}, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["readme"] == "readme test" + + +def test_put_version_changelog(_logged_in_client): + """ + Given a Flask application configured for testing + WHEN the '/study//dataset//version//changelog' endpoint is requested (PUT) + THEN check that the response is valid and retrieves the design metadata + """ + # create a new dataset and delete it afterwards + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id # type: ignore + version_id = pytest.global_dataset_version_id # type: ignore + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog", + json={"changelog": "changelog test"}, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data == "changelog test" + + +def test_get_version_changelog(_logged_in_client): + """ + Given a Flask application configured for testing + WHEN the 
'/study//dataset//version//changelog' endpoint is requested (GET) + THEN check that the response is valid and retrieves the design metadata + """ + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id # type: ignore + version_id = pytest.global_dataset_version_id # type: ignore + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog" + ) + + assert response.status_code == 200 From d1d829b29b38f15a7d918986da4be7b3e5da607b Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 30 Nov 2023 14:51:41 -0800 Subject: [PATCH 374/505] chore: update readme for better instructions --- README.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index a68b3b90..b2c62df3 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,8 @@ You will need the following installed on your system: If you would like to update the api, please follow the instructions below. +Don't forget to start the database before running the api. See [Database](#database) for more information. + 1. Create a local virtual environment and activate it: ```bash @@ -83,20 +85,19 @@ If you would like to update the api, please follow the instructions below. ## Database -The api uses a postgres database. You can run a postgres database locally using docker: +The api uses a postgres database. You can create a database locally using docker: ```bash -docker-compose -f ./db-docker-compose.yml up +docker-compose -f ./db-docker-compose.yaml up +docker-compose -f ./db-docker-compose.yaml up -d # if you want the db to run in the background ``` Close the database with: ```bash -docker-compose -f ./db-docker-compose.yml down -v +docker-compose -f ./db-docker-compose.yaml down -v ``` -This database will not persist data between runs. 
- ## License This work is licensed under From a84ff4bcc1603382f230c56d9a12f3166719d561 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 4 Dec 2023 18:40:58 -0800 Subject: [PATCH 375/505] =?UTF-8?q?=F0=9F=9A=A8=20fix:=20precommit=20error?= =?UTF-8?q?s=20fixed?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .pylint.ini | 2 +- apis/dashboard.py | 290 ++++++++-- apis/redcap.py | 58 +- app.py | 28 +- model/study_redcap_project_api.py | 32 +- model/study_redcap_project_dashboard.py | 36 +- modules/etl/config/aireadi_config.py | 622 +++++++++++---------- modules/etl/transforms/module_transform.py | 59 +- modules/etl/transforms/redcap_transform.py | 206 ++++--- modules/etl/vtypes/__init__.py | 2 +- modules/etl/vtypes/categorical.py | 8 +- modules/etl/vtypes/compound.py | 30 +- modules/etl/vtypes/continuous.py | 9 +- modules/etl/vtypes/discrete.py | 12 +- modules/etl/vtypes/mixed.py | 22 +- modules/etl/vtypes/timeseries.py | 8 +- modules/etl/vtypes/vtype.py | 58 +- sql/init.sql | 5 +- sql/init_timezones.sql | 5 +- sql/specific_tables.sql | 12 +- 20 files changed, 907 insertions(+), 597 deletions(-) diff --git a/.pylint.ini b/.pylint.ini index 965d3ef4..63f83137 100644 --- a/.pylint.ini +++ b/.pylint.ini @@ -409,4 +409,4 @@ known-third-party=enchant # Exceptions that will emit a warning when being caught. 
Defaults to # "Exception" overgeneral-exceptions=builtins.BaseException, - builtins.Exception \ No newline at end of file + builtins.Exception diff --git a/apis/dashboard.py b/apis/dashboard.py index f5be63fa..19e61b2d 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -1,5 +1,5 @@ """API routes for study redcap""" -from typing import Any, Union +from typing import Any, Dict, List, Union from flask import request @@ -53,6 +53,21 @@ }, ) +redcap_project_report_model = api.model( + "RedcapProjectReport", + { + "report_id": fields.String( + required=True, readonly=True, description="REDCap report ID" + ), + "report_key": fields.String( + required=True, readonly=True, description="REDCap report key" + ), + "report_name": fields.String( + required=True, readonly=True, description="REDCap report name" + ), + }, +) + redcap_project_dashboard_module_model = api.model( "RedcapProjectDashboardModule", { @@ -62,10 +77,10 @@ "id": fields.String( required=True, readonly=True, description="Dashboard module id" ), - "reportId": fields.String( + "report_key": fields.String( required=True, readonly=True, - description="Dashboard module associated REDCap report ID", + description="Dashboard module associated REDCap report key", ), "selected": fields.Boolean( required=True, readonly=True, description="Dashboard module is selected" @@ -84,6 +99,14 @@ "project_id": fields.String( required=True, readonly=True, description="REDCap project ID (pid)" ), + "reports": fields.List( + fields.Nested( + redcap_project_report_model, + required=True, + readonly=True, + description="Associated REDCap reports", + ) + ), "dashboard_id": fields.String( required=True, readonly=True, description="REDCap dashboard ID" ), @@ -100,18 +123,61 @@ ), }, ) +redcap_project_dashboard_module_connector_model = api.model( + "RedcapProjectDashboardModuleConnector", + { + "name": fields.String( + required=True, readonly=True, description="Dashboard module name" + ), + "id": fields.String( + required=True, 
readonly=True, description="Dashboard module id" + ), + "report_key": fields.String( + required=True, + readonly=True, + description="Dashboard module associated REDCap report key", + ), + "selected": fields.Boolean( + required=True, readonly=True, description="Dashboard module is selected" + ), + }, +) +redcap_project_dashboard_connector_model = api.model( + "RedcapProjectDashboardConnector", + { + "project_id": fields.String( + required=True, readonly=True, description="REDCap project ID (pid)" + ), + "reports": fields.List( + fields.Nested( + redcap_project_report_model, + required=True, + readonly=True, + description="Associated REDCap reports", + ) + ), + "dashboard_id": fields.String( + required=True, readonly=True, description="REDCap dashboard ID" + ), + "dashboard_name": fields.String( + required=True, readonly=True, description="REDCap dashboard name" + ), + "dashboard_modules": fields.List( + fields.Nested( + redcap_project_dashboard_module_connector_model, + required=True, + readonly=True, + description="REDCap dashboard module connector", + ) + ), + }, +) # Parser dashboard_parser = reqparse.RequestParser() dashboard_parser.add_argument("dashboard_id", type=str, help="Dashboard ID") -# Dashboard Cache Key -def dashboard_cache_key(): - print(request.get_json()) - return ",".join([f"{key}={value}" for key, value in request.get_json().items()]) - - @api.route("/study//dashboard/all") class RedcapProjectDashboards(Resource): @api.doc("redcap_project_dashboards") @@ -119,28 +185,28 @@ class RedcapProjectDashboards(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model, as_list=True) def get(self, study_id: int): - """Get all study REDCap project dashboard""" + """Get all REDCap project dashboard""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 - redcap_project_dashboards = 
model.StudyRedcapProjectDashboard.query.filter_by( - study=study + redcap_project_dashboards_query = ( + model.StudyRedcapProjectDashboard.query.filter_by(study=study) ) - redcap_project_dashboards = [ + redcap_project_dashboards: List[Dict[str, Any]] = [ redcap_project_dashboard.to_dict() - for redcap_project_dashboard in redcap_project_dashboards + for redcap_project_dashboard in redcap_project_dashboards_query ] return redcap_project_dashboards, 201 -@api.route("/study//dashboard/connect") -class ConnectRedcapProjectDashboard(Resource): +@api.route("/study//dashboard/add") +class AddRedcapProjectDashboard(Resource): @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) def post(self, study_id: int): - """Create study REDCap project dashboard""" + """Create REDCap project dashboard""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 @@ -150,11 +216,28 @@ def post(self, study_id: int): "additionalProperties": False, "required": [ "project_id", + "reports", "dashboard_name", "dashboard_modules", ], "properties": { "project_id": {"type": "string", "minLength": 1}, + "reports": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "properties": { + "report_id": {"type": "string", "minLength": 0}, + "report_key": {"type": "string", "minLength": 1}, + "report_name": {"type": "string", "minLength": 1}, + }, + } + ] + }, + "minItems": 1, + }, "dashboard_name": {"type": "string", "minLength": 1}, "dashboard_modules": { "type": "array", @@ -166,7 +249,7 @@ def post(self, study_id: int): "id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "selected": {"type": "boolean"}, - "reportId": {"type": "string", "minLength": 0}, + "report_key": {"type": "string", "minLength": 1}, }, } ] @@ -175,7 +258,7 @@ def post(self, study_id: int): }, }, } - data: 
Union[Any, dict] = request.json + data: Union[Any, Dict[str, Any]] = request.json try: validate(request.json, schema) except ValidationError as e: @@ -183,31 +266,66 @@ def post(self, study_id: int): return e.message, 400 if len(data["project_id"]) < 1: return ( - f"redcap project_id is required to connect a dashboard: {data['project_id']}", + f"redcap project_id is required to connect a dashboard: \ + {data['project_id']}", + 400, + ) + if len(data["reports"]) < 1: + return ( + f"redcap reports are required to connect a dashboard: \ + {data['reports']}", 400, ) if len(data["dashboard_name"]) < 1: return ( - f"dashboard dashboard_name is required to connect a dashboard: {data['dashboard_name']}", + f"dashboard dashboard_name is required to connect a dashboard: \ + {data['dashboard_name']}", 400, ) if len(data["dashboard_modules"]) < 1: return ( - f"dashboard dashboard_modules is required to connect a dashboard: {data['dashboard_name']}", + f"dashboard dashboard_modules is required to connect a dashboard: \ + {data['dashboard_name']}", 400, ) - connect_redcap_project_dashboard = model.StudyRedcapProjectDashboard.from_data( - study, data + connect_redcap_project_dashboard_data = ( + model.StudyRedcapProjectDashboard.from_data(study, data) ) - model.db.session.add(connect_redcap_project_dashboard) + model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard = connect_redcap_project_dashboard.to_dict() + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 +@api.route("/study//dashboard-connector") +class RedcapProjectDashboardConnector(Resource): + """Get REDCap project dashboard connector""" + + @api.doc(parser=dashboard_parser) + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_connector_model) + def get(self, study_id: int): + """Get Study 
Redcap Project Dashboard""" + study = model.db.session.query(model.Study).get(study_id) + if is_granted("redcap_access", study): + return "Access denied, you can not get this dashboard", 403 + dashboard_id = dashboard_parser.parse_args()["dashboard_id"] + # Get Dashboard + redcap_project_dashboard_connector: Any = model.db.session.query( + model.StudyRedcapProjectDashboard + ).get(dashboard_id) + redcap_project_dashboard_connector = ( + redcap_project_dashboard_connector.to_dict() + ) + return redcap_project_dashboard_connector, 201 + + @api.route("/study//dashboard") class RedcapProjectDashboard(Resource): - """Get study REDCap project dashboard""" + """Get REDCap project dashboard""" @api.doc(parser=dashboard_parser) @api.response(200, "Success") @@ -220,38 +338,60 @@ def get(self, study_id: int): if is_granted("redcap_access", study): return "Access denied, you can not get this dashboard", 403 dashboard_id = dashboard_parser.parse_args()["dashboard_id"] - redcap_project_dashboard = model.db.session.query( + # Get Dashboard + redcap_project_dashboard: Any = model.db.session.query( model.StudyRedcapProjectDashboard ).get(dashboard_id) - # Execute REDCap Merge Transform redcap_project_dashboard = redcap_project_dashboard.to_dict() + # Get REDCap Project project_id = redcap_project_dashboard["project_id"] - redcap_project_view = ( - model.db.session.query(model.StudyRedcapProjectApi) - .get(project_id) - .to_dict() - ) - redcap_etl_config = transformConfigs["redcap"] - redcap_etl_config["redcap_api_url"] = redcap_project_view["project_api_url"] - redcap_etl_config["redcap_api_key"] = redcap_project_view["project_api_key"] + redcap_project_view: Any = model.db.session.query( + model.StudyRedcapProjectApi + ).get(project_id) + redcap_project_view = redcap_project_view.to_dict() + + # Set report_ids for ETL + for report in redcap_project_dashboard["reports"]: + for i, report_config in enumerate(transformConfigs["redcap"]["reports"]): + if ( + report["report_key"] == 
report_config["key"] + and len(report["report_id"]) > 0 + ): + transformConfigs["redcap"]["reports"][i]["kwdargs"] |= { + "report_id": report["report_id"] + } + + # Structure REDCap ETL Config + redcap_etl_config = { + "redcap_api_url": redcap_project_view["project_api_url"], + "redcap_api_key": redcap_project_view["project_api_key"], + } | transformConfigs["redcap"] + mergedTransform = RedcapTransform(redcap_etl_config).merged # Execute Dashboard Module Transforms for dashboard_module in redcap_project_dashboard["dashboard_modules"]: transform, module_etl_config = transformConfigs[dashboard_module["id"]] + transformed = getattr(ModuleTransform(module_etl_config), transform)( + mergedTransform + ).transformed dashboard_module["visualizations"] = { "id": dashboard_module["id"], - "data": getattr(ModuleTransform(module_etl_config), transform)( - mergedTransform - ).transformed, + "data": transformed, } return redcap_project_dashboard, 201 + +@api.route("/study//dashboard/edit") +class EditRedcapProjectDashboard(Resource): + """Edit REDCap project dashboard""" + + @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) def put(self, study_id: int): - """Update study REDCap project dashboard""" + """Update REDCap project dashboard""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify this dashboard", 403 @@ -260,12 +400,31 @@ def put(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ + "project_id", + "reports", "dashboard_id", "dashboard_name", "dashboard_modules", ], "properties": { - "dashboard_id": {"type": "string", "minLength": 36, "maxLength": 37}, + "project_id": {"type": "string", "minLength": 1}, + "reports": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "properties": { + "report_id": {"type": "string", "minLength": 
0}, + "report_key": {"type": "string", "minLength": 1}, + "report_name": {"type": "string", "minLength": 1}, + }, + } + ] + }, + "minItems": 1, + }, + "dashboard_id": {"type": "string", "minLength": 1}, "dashboard_name": {"type": "string", "minLength": 1}, "dashboard_modules": { "type": "array", @@ -277,7 +436,7 @@ def put(self, study_id: int): "id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "selected": {"type": "boolean"}, - "reportId": {"type": "string", "minLength": 0}, + "report_key": {"type": "string", "minLength": 1}, }, } ] @@ -286,53 +445,70 @@ def put(self, study_id: int): }, }, } - data: Union[Any, dict] = request.json + data: Union[Any, Dict[str, Any]] = request.json try: validate(request.json, schema) except ValidationError as e: - print(e) + print("validation error") return e.message, 400 - + if len(data["project_id"]) < 1: + return ( + f"redcap project_id is required to connect a dashboard: \ + {data['project_id']}", + 400, + ) + if len(data["reports"]) < 1: + return ( + f"redcap reports are required to connect a dashboard: \ + {data['reports']}", + 400, + ) if len(data["dashboard_id"]) < 1: return ( - f"dashboard dashboard_id is required to connect a dashboard: {data['dashboard_id']}", + f"dashboard dashboard_id is required to connect a dashboard: \ + {data['dashboard_id']}", 400, ) if len(data["dashboard_name"]) < 1: return ( - f"dashboard dashboard_name is required to connect a dashboard: {data['dashboard_name']}", + f"dashboard dashboard_name is required to connect a dashboard: \ + {data['dashboard_name']}", 400, ) if len(data["dashboard_modules"]) < 1: return ( - f"dashboard dashboard_modules is required to connect a dashboard: {data['dashboard_name']}", + f"dashboard dashboard_modules is required to connect a dashboard: \ + {data['dashboard_name']}", 400, ) # Clear Redis Cache # TODO: We want to clear the cache by dashboard_id, not the whole cache! 
cache.clear() - update_redcap_project_dashboard = model.StudyRedcapProjectDashboard.query.get( - data["dashboard_id"] + update_redcap_project_dashboard_query = ( + model.StudyRedcapProjectDashboard.query.get(data["dashboard_id"]) ) - update_redcap_project_dashboard.update(data) + update_redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard = update_redcap_project_dashboard.to_dict() + update_redcap_project_dashboard: Dict[ + str, Any + ] = update_redcap_project_dashboard_query.to_dict() return update_redcap_project_dashboard, 201 @api.route("/study//dashboard/delete") class DeleteRedcapProjectDashboard(Resource): + @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) - def post(self, study_id: int): - """Delete study REDCap project dashboard""" + def delete(self, study_id: int): + """Delete REDCap project dashboard""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not delete this redcap project", 403 - data: Union[Any, dict] = request.json + dashboard_id = dashboard_parser.parse_args()["dashboard_id"] model.StudyRedcapProjectDashboard.query.filter_by( - dashboard_id=data["dashboard_id"] + dashboard_id=dashboard_id ).delete() model.db.session.commit() return 204 diff --git a/apis/redcap.py b/apis/redcap.py index b1ed14de..89adbd89 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -63,7 +63,7 @@ class RedcapProjectAPIs(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model, as_list=True) def get(self, study_id: int): - """Get all study REDCap project API links""" + """Get all REDCap project API links""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return ( @@ -116,27 +116,32 @@ def post(self, study_id: int): if len(data["project_title"]) < 1: return ( - f"redcap project_title is 
required for redcap access: {data['project_title']}", + f"redcap project_title is required for redcap access: \ + {data['project_title']}", 400, ) if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap access: {data['project_id']}", + f"redcap project_id is required for redcap access: \ + {data['project_id']}", 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: {data['project_api_url']}", + f"redcap project_api_url is required for redcap access: \ + {data['project_api_url']}", 400, ) if len(data["project_api_key"]) < 1: return ( - f"redcap project_api_key is required for redcap access: {data['project_api_key']}", + f"redcap project_api_key is required for redcap access: \ + {data['project_api_key']}", 400, ) - if type(data["project_api_active"]) is not bool: + if isinstance(data["project_api_active"], bool): return ( - f"redcap project_api_active is required for redcap access: {data['project_api_active']}", + f"redcap project_api_active is required for redcap access: \ + {data['project_api_active']}", 400, ) @@ -154,22 +159,26 @@ class RedcapProjectAPI(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) def get(self, study_id: int): - """Get study REDCap project API link""" + """Get REDCap project API link""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not get this redcap project", 403 project_id = project_parser.parse_args()["project_id"] - redcap_project_view = model.db.session.query(model.StudyRedcapProjectApi).get( - project_id - ) + redcap_project_view: Any = model.db.session.query( + model.StudyRedcapProjectApi + ).get(project_id) redcap_project_view = redcap_project_view.to_dict() return redcap_project_view, 201 + +@api.route("/study//redcap/edit") +class EditRedcapProjectAPI(Resource): + @api.doc(parser=project_parser) @api.response(200, 
"Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) def put(self, study_id: int): - """Update study REDCap project API link""" + """Update REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify this redcap project", 403 @@ -198,22 +207,26 @@ def put(self, study_id: int): if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap access: {data['project_id']}", + f"redcap project_id is required for redcap access: \ + {data['project_id']}", 400, ) if len(data["project_title"]) < 1: return ( - f"redcap project_title is required for redcap access: {data['project_title']}", + f"redcap project_title is required for redcap access: \ + {data['project_title']}", 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: {data['project_api_url']}", + f"redcap project_api_url is required for redcap access: \ + {data['project_api_url']}", 400, ) - if type(data["project_api_active"]) is not bool: + if isinstance(data["project_api_active"], bool): return ( - f"redcap project_api_active is required for redcap access: {data['project_api_active']}", + f"redcap project_api_active is required for redcap access: \ + {data['project_api_active']}", 400, ) @@ -228,17 +241,16 @@ def put(self, study_id: int): @api.route("/study//redcap/delete") class DeleteRedcapProjectAPI(Resource): + @api.doc(parser=project_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) - def post(self, study_id: int): - """Delete study REDCap project API link""" + def delete(self, study_id: int): + """Delete REDCap project API link""" study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not delete this redcap project", 403 - data: Union[Any, dict] = request.json - 
model.StudyRedcapProjectApi.query.filter_by( - project_id=data["project_id"] - ).delete() + project_id = project_parser.parse_args()["project_id"] + model.StudyRedcapProjectApi.query.filter_by(project_id=project_id).delete() model.db.session.commit() return 204 diff --git a/app.py b/app.py index 8d164d76..9bc8ed71 100644 --- a/app.py +++ b/app.py @@ -100,14 +100,33 @@ def create_app(config_module=None): @app.cli.command("create-schema") def create_schema(): + """Create the database schema.""" engine = model.db.session.get_bind() metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + if len(table_names) == 0: + with engine.begin(): + model.db.create_all() + + @app.cli.command("destroy-schema") + def destroy_schema(): + """Create the database schema.""" + engine = model.db.session.get_bind() + with engine.begin(): + model.db.drop_all() + + @app.cli.command("cycle-schema") + def cycle_schema(): + """Destroy then re-create the database schema.""" + engine = model.db.session.get_bind() + with engine.begin(): + model.db.drop_all() metadata = MetaData() metadata.reflect(bind=engine) table_names = [table.name for table in metadata.tables.values()] if len(table_names) == 0: with engine.begin(): - """Create the database schema.""" model.db.create_all() @app.before_request @@ -205,13 +224,6 @@ def on_after_request(resp): def validation_exception_handler(error): return error.args[0], 422 - @app.cli.command("destroy-schema") - def destroy_schema(): - """Create the database schema.""" - engine = model.db.session.get_bind() - with engine.begin(): - model.db.drop_all() - with app.app_context(): engine = model.db.session.get_bind() metadata = MetaData() diff --git a/model/study_redcap_project_api.py b/model/study_redcap_project_api.py index 1ed408da..2f435442 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap_project_api.py @@ -1,6 +1,7 @@ import uuid from dataclasses import dataclass from 
datetime import datetime, timezone +from typing import Any, Dict, List from model import Study @@ -13,23 +14,14 @@ class StudyRedcapProjectApi(db.Model): # type: ignore A REDCap Project API is associated a study """ - study_id: str - project_id: int - project_title: str - project_api_url: str - project_api_key: str - project_api_active: bool - created_at: int - updated_on: int - __tablename__: str = "study_redcap_project_api" project_id: int = db.Column(db.BigInteger, primary_key=True) project_title: str = db.Column(db.String, nullable=False) project_api_url: str = db.Column(db.String, nullable=False) project_api_key: str = db.Column(db.String, nullable=False) project_api_active: bool = db.Column(db.Boolean, nullable=False) - created_at: int = db.Column(db.BigInteger, nullable=False) - updated_on: int = db.Column(db.BigInteger, nullable=False) + created_at: float = db.Column(db.BigInteger, nullable=False) + updated_on: float = db.Column(db.BigInteger, nullable=False) study_id: str = db.Column( db.CHAR(36), @@ -48,7 +40,7 @@ def __init__(self, study): self.id = str(uuid.uuid4()) self.created_at = datetime.now(timezone.utc).timestamp() - def to_dict(self): + def to_dict(self) -> Dict: """Converts the study to a dictionary""" return { "project_id": self.project_id, @@ -59,22 +51,28 @@ def to_dict(self): } @staticmethod - def from_data(study: Study, data: dict): + def from_data(study: Study, data: Dict) -> Any: """Creates a new study from a dictionary""" study_redcap_project_api = StudyRedcapProjectApi(study) study_redcap_project_api.update(data) return study_redcap_project_api - def update(self, data: dict): + def update(self, data: Dict) -> Any: """Updates the study from a dictionary""" - assignable = {key for key in self.to_dict().keys() if key.startswith("project")} + user_updatable_props = [ + "project_id", + "project_title", + "project_api_url", + "project_api_key", + "project_api_active", + ] for key, val in data.items(): - if key in assignable: + if key in 
user_updatable_props: setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() return self - def validate(self): + def validate(self) -> List: """Validates the study""" violations: list = [] return violations diff --git a/model/study_redcap_project_dashboard.py b/model/study_redcap_project_dashboard.py index 0327ef69..3df4c3c4 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_redcap_project_dashboard.py @@ -1,6 +1,7 @@ import uuid from dataclasses import dataclass from datetime import datetime, timezone +from typing import Any, Dict, List from sqlalchemy_json import NestedMutableJson @@ -16,21 +17,15 @@ class StudyRedcapProjectDashboard(db.Model): # type: ignore REDCap Project, which is part of a study """ - project_id: int - dashboard_id: str - dashboard_name: str - dashboard_modules: list[dict[str, (str | bool | int)]] - created_at: int - updated_on: int - __tablename__: str = "study_redcap_project_dashboard" dashboard_id: str = db.Column(db.CHAR(36), primary_key=True) dashboard_name: str = db.Column(db.String, nullable=False) dashboard_modules: list[dict[str, (str | bool | int)]] = db.Column( NestedMutableJson, nullable=True ) - created_at: int = db.Column(db.BigInteger, nullable=False) - updated_on: int = db.Column(db.BigInteger, nullable=False) + reports: list[dict[str, str]] = db.Column(NestedMutableJson, nullable=True) + created_at: float = db.Column(db.BigInteger, nullable=False) + updated_on: float = db.Column(db.BigInteger, nullable=False) project_id: int = db.Column( db.BigInteger, db.ForeignKey("study_redcap_project_api.project_id", ondelete="CASCADE"), @@ -55,38 +50,41 @@ def __init__(self, study): self.dashboard_id = str(uuid.uuid4()) self.created_at = datetime.now(timezone.utc).timestamp() - def to_dict(self): + def to_dict(self) -> Dict: """Converts the study to a dictionary""" return { "project_id": self.project_id, "dashboard_id": self.dashboard_id, "dashboard_name": self.dashboard_name, 
"dashboard_modules": self.dashboard_modules, + "reports": self.reports, "created_at": self.created_at, "updated_on": self.updated_on, } @staticmethod - def from_data(study: Study, data: dict): + def from_data(study: Study, data: Dict) -> Any: """Creates a new study from a dictionary""" study_redcap_project_dashboard = StudyRedcapProjectDashboard(study) study_redcap_project_dashboard.update(data) return study_redcap_project_dashboard - def update(self, data: dict): + def update(self, data: Dict) -> Any: """Updates the study from a dictionary""" - assignable = { - key - for key in self.to_dict().keys() - if key.startswith("project") or key.startswith("dashboard") - } + user_updatable_props = [ + "project_id", + "dashboard_id", + "dashboard_name", + "dashboard_modules", + "reports", + ] for key, val in data.items(): - if key in assignable: + if key in user_updatable_props: setattr(self, key, val) self.updated_on = datetime.now(timezone.utc).timestamp() return self - def validate(self): + def validate(self) -> List: """Validates the study""" violations: list = [] return violations diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 1d05f8de..ee7b05e2 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -1,3 +1,4 @@ +from typing import Any, Dict, List, Tuple import numpy as np from datetime import datetime @@ -10,15 +11,15 @@ # Value assigned to missing values unless other specific value defined on function call # (e.g. 
REDCapTransform.map_missing_values_by_columns(df, columns, new_missing_value)) -missing_value_generic = "Value Unavailable" +missing_value_generic: str = "Value Unavailable" # Utility Column Groups -index_columns = [ +index_columns: List = [ "record_id", ] # Data Column Groups -data_columns = [ +data_columns: List = [ "studyid", "siteid", "dm", @@ -51,8 +52,13 @@ "scrcmpdat", ] +computed_columns: List = [ + "phenotype", + "treatments", +] + # Survey Column Groups -survey_columns = [ +survey_columns: List = [ "screening_survey_complete", "study_enrollment_complete", "recruitment_survey_complete", @@ -93,80 +99,86 @@ ] # Repeat Survey Column Groups -repeat_survey_columns = [ +repeat_survey_columns: List = [ "current_medications_complete", ] -repeat_survey_data_columns = ["current_medications_complete", "current_medications"] +repeat_survey_data_columns: List = [ + "current_medications_complete", + "current_medications" +] # # Value Maps # -survey_instrument_map = { +survey_instrument_map: Dict[str, str] = { "2": "Complete", "1": "Unverified", "0": "Incomplete", } +phenotype_column_map: Dict[str, str] = { + "mhterm_dm2": "Type II Diabetes", + "mhterm_predm": "Prediabetes", + "mh_a1c": "Elevated A1C", +} + +treatments_column_map: Dict[str, str] = { + "cmtrt_a1c": "Oral Medication", + "cmtrt_glcs": "Non-Insulin Injectable", + "cmtrt_insln": "Insuling Injectable", + "cmtrt_lfst": "Lifestyle Management" +} + # # REDCap Transform Config # -redcapTransformConfig = { +# Note: The REDCap report_id is matched to the transform +# by the value of the key property in the report dictionary. 
+redcapTransformConfig: Dict[str, List[Any]|Tuple[str, List[Any]]|str|List] = { "reports": [ - ( - "participant-value", - {"report_id": 242544}, - [ + { + "key": "participant-value", + "kwdargs": {}, + "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), ("map_missing_values_by_columns", {"columns": data_columns}), - ("keep_columns", {"columns": index_columns + data_columns}), + ("new_column_from_binary_columns_positive_class", {"column_name_map": phenotype_column_map, "new_column_name": "phenotype"}), + ("new_column_from_binary_columns_positive_class", {"column_name_map": treatments_column_map, "new_column_name": "treatments"}), + ("keep_columns", {"columns": index_columns + data_columns + computed_columns}), ], - ), - ( - "instrument-status", - {"report_id": 251954}, - [ - ( - "remap_values_by_columns", - {"columns": survey_columns, "value_map": survey_instrument_map}, - ), + }, + { + "key": "instrument-status", + "kwdargs": {}, + "transforms": [ + ("remap_values_by_columns", {"columns": survey_columns, "value_map": survey_instrument_map}), ("map_missing_values_by_columns", {"columns": survey_columns}), ("keep_columns", {"columns": index_columns + survey_columns}), ], - ), - ( - "repeat_instrument", - {"report_id": 259920}, - [ + }, + { + "key": "repeat-instrument", + "kwdargs": {}, + "transforms": [ ("drop_rows", {"columns": repeat_survey_columns}), - ( - "aggregate_repeat_instrument_column_by_index", - {"aggregator": np.max, "dtype": str}, - ), - ( - "keep_columns", - {"columns": index_columns + repeat_survey_data_columns}, - ), + ("aggregate_repeat_instrument_by_index", {"aggregator": np.max, "dtype": str}), + ("keep_columns", {"columns": index_columns + repeat_survey_data_columns}), ], - ), + }, ], - "merge_transformed_reports": ( - "participant-value", - [ + "post_transform_merge": ( + "participant-value", [ + # ("participant-value", {"on": index_columns, "how": "inner"}), ("instrument-status", {"on": index_columns, "how": "inner"}), - ( - 
"repeat_instrument", - {"on": index_columns, "how": "outer"}, - ), + ("repeat-instrument", {"on": index_columns, "how": "outer"}), + # ("repeat-instrument", {"on": index_columns, "how": "outer"}), ], ), "post_merge_transforms": [ - ( - "remap_values_by_columns", - {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, - ), + ("remap_values_by_columns",{"columns": repeat_survey_columns, "value_map": survey_instrument_map}), ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), ], "index_columns": ["record_id"], @@ -178,7 +190,7 @@ # # Overview -overviewTransformConfig = ( +overviewTransformConfig: Tuple[str, Dict[str, Any]] = ( "compoundTransform", { "key": "overview", @@ -1737,179 +1749,74 @@ }, ) -# Sex & Gender Counts by Site -recruitmentTransformConfig = ( +# Recruitment Counts by Site +recruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { "key": "recruitment", "strict": True, - "transforms": { - "name": "Recruitment", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["siteid", "scrcmpdat", "race"], - "value": "scrcmpdat", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "datetime": { - "name": "Date", - "field": "scrcmpdat", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "scrcmpdat", - "missing_value": missing_value_generic, - "remap": lambda x: int( - datetime.fromisoformat(x["record"]["scrcmpdat"]).strftime( - "%Y%m%d" - ) - ), - # key, accessors, name, record - "astype": int, - }, - }, - }, - }, -) - -# Sex & Gender Counts by Site -sexGenderTransformConfig = ( - 
"simpleTransform", - { - "key": "sex-gender", - "strict": True, - "transforms": { - "name": "Sex & Gender", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "scrsex", "genderid"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Gender", - "field": "genderid", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Gender", - "field": "genderid", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, + "transforms": [ + { + "name": "Recruitment", + "vtype": "DoubleDiscrete", + "methods": [ + { + "groups": ["siteid", "race", "scrcmpdat"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astyoe": str, + }, + "subgroup": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "scrcmpdat", + "missing_value": missing_value_generic, + "astype": int, + "remap": lambda x: datetime.strptime(x["record"][x["accessors"]["x"]["field"]], "%Y-%m-%d").isocalendar().week + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, }, }, - }, + ] }, ) # Race & Ethnicity Counts by Site -raceEthnicityTransformConfig = ( +raceEthnicityTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { "key": "race-ethnicity", "strict": True, - 
"transforms": { - "name": "Race & Ethnicity", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "race", "ethnic"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Ethnicity", - "field": "ethnic", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "name": "Ethnicity", - "field": "ethnic", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - }, -) - -# Phenotypes -phenotypesTransformConfig = ( - "compoundTransform", - { - "key": "phenotype", - "strict": True, "transforms": [ { - "name": "Prediabetes", - "vtype": "SingleCategorical", + "name": "Race & Ethnicity", + "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "mhterm_predm"], + "groups": ["siteid", "race", "ethnic"], "value": "record_id", "func": "count", } @@ -1922,20 +1829,20 @@ "astype": str, }, "group": { - "name": "Prediabetes", - "field": "mhterm_predm", - "remap": lambda x: "Prediabetes" - if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" - else "No", + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Ethnicity", + "field": "ethnic", "missing_value": missing_value_generic, "astype": str, }, "color": { - "name": "Prediabetes", - "field": "mhterm_predm", - "remap": lambda x: "Prediabetes" - if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" - else "No", + "name": "Ethnicity", + "field": "ethnic", "missing_value": missing_value_generic, "astype": str, }, @@ -1947,12 +1854,23 @@ }, }, }, + ] + }, +) + +# Sex & Gender 
Counts by Site +sexGenderTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "sex-gender", + "strict": True, + "transforms": [ { - "name": "Type I Diabetes", - "vtype": "SingleCategorical", + "name": "Sex & Gender", + "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "mhterm_dm1"], + "groups": ["siteid", "scrsex", "genderid"], "value": "record_id", "func": "count", } @@ -1962,23 +1880,22 @@ "name": "Site", "field": "siteid", "missing_value": missing_value_generic, - "astype": str, }, "group": { - "name": "Type I Diabetes", - "field": "mhterm_dm1", - "remap": lambda x: "Type I Diabetes" - if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" - else "No", + "name": "Gender", + "field": "genderid", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, "color": { - "name": "Type I Diabetes", - "field": "mhterm_dm1", - "remap": lambda x: "Type I Diabetes" - if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" - else "No", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, @@ -1990,12 +1907,163 @@ }, }, }, + ] + }, +) + +# # Phenotypes +# phenotypesTransformConfig: Tuple[str, Dict[str, Any]] = ( +# "compoundTransform", +# { +# "key": "phenotype", +# "strict": True, +# "transforms": [ +# { +# "name": "Prediabetes", +# "vtype": "SingleCategorical", +# "methods": [ +# { +# "groups": ["siteid", "mhterm_predm"], +# "value": "record_id", +# "func": "count", +# } +# ], +# "accessors": { +# "filterby": { +# "name": "Site", +# "field": "siteid", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "group": { +# "name": "Prediabetes", +# "field": "mhterm_predm", +# "remap": lambda x: "Prediabetes" +# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" +# else "No", +# "missing_value": missing_value_generic, +# "astype": 
str, +# }, +# "color": { +# "name": "Prediabetes", +# "field": "mhterm_predm", +# "remap": lambda x: "Prediabetes" +# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" +# else "No", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "value": { +# "name": "Count (N)", +# "field": "record_id", +# "missing_value": missing_value_generic, +# "astype": int, +# }, +# }, +# }, +# { +# "name": "Type I Diabetes", +# "vtype": "SingleCategorical", +# "methods": [ +# { +# "groups": ["siteid", "mhterm_dm1"], +# "value": "record_id", +# "func": "count", +# } +# ], +# "accessors": { +# "filterby": { +# "name": "Site", +# "field": "siteid", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "group": { +# "name": "Type I Diabetes", +# "field": "mhterm_dm1", +# "remap": lambda x: "Type I Diabetes" +# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" +# else "No", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "color": { +# "name": "Type I Diabetes", +# "field": "mhterm_dm1", +# "remap": lambda x: "Type I Diabetes" +# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" +# else "No", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "value": { +# "name": "Count (N)", +# "field": "record_id", +# "missing_value": missing_value_generic, +# "astype": int, +# }, +# }, +# }, +# { +# "name": "Type II Diabetes", +# "vtype": "SingleCategorical", +# "methods": [ +# { +# "groups": ["siteid", "mhterm_dm2"], +# "value": "record_id", +# "func": "count", +# } +# ], +# "accessors": { +# "filterby": { +# "name": "Site", +# "field": "siteid", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "group": { +# "name": "Type II Diabetes", +# "field": "mhterm_dm2", +# "remap": lambda x: "Type II Diabetes" +# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" +# else "No", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "color": { +# "name": 
"Type II Diabetes", +# "field": "mhterm_dm2", +# "remap": lambda x: "Type II Diabetes" +# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" +# else "No", +# "missing_value": missing_value_generic, +# "astype": str, +# }, +# "value": { +# "name": "Count (N)", +# "field": "record_id", +# "missing_value": missing_value_generic, +# "astype": int, +# }, +# }, +# }, +# ], +# }, +# ) + +# Phenotypes +phenotypesTransformConfig: Tuple[str, Dict[str, Any]] = ( + "compoundTransform", + { + "key": "phenotype", + "strict": True, + "transforms": [ { - "name": "Type II Diabetes", + "name": "Phenotype", "vtype": "SingleCategorical", "methods": [ { - "groups": ["siteid", "mhterm_dm2"], + "groups": ["siteid", "phenotype"], "value": "record_id", "func": "count", } @@ -2008,20 +2076,14 @@ "astype": str, }, "group": { - "name": "Type II Diabetes", - "field": "mhterm_dm2", - "remap": lambda x: "Type II Diabetes" - if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" - else "No", + "name": "Phenotype", + "field": "phenotype", "missing_value": missing_value_generic, "astype": str, }, "color": { - "name": "Type II Diabetes", - "field": "mhterm_dm2", - "remap": lambda x: "Type II Diabetes" - if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" - else "No", + "name": "Phenotype", + "field": "mhterm_predm", "missing_value": missing_value_generic, "astype": str, }, @@ -2032,60 +2094,60 @@ "astype": int, }, }, - }, + } ], }, ) -currentMedicationsTransformConfig = ( +currentMedicationsTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { "key": "current-medications", "strict": True, - "transforms": { - "name": "Current Medications", - "vtype": "SingleCategorical", - "methods": [ - { - "groups": ["siteid", "current_medications_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": 
lambda x: x["name"], - "name": "Current Medications Status", - "field": "current_medications_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "color": { - "remap": lambda x: x["name"], - "name": "Current Medications Status", - "field": "current_medications_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Current Medications (N)", - "field": "current_medications", - "missing_value": missing_value_generic, - "astype": int, + "transforms": [ + { + "name": "Current Medications", + "vtype": "SingleCategorical", + "methods": [ + { + "groups": ["siteid"], + "value": "current_medications", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Current Medications (N)", + "field": "current_medications", + "missing_value": missing_value_generic, + "astype": int, + }, }, - }, - }, + } + ] }, ) -transformConfigs = { +transformConfigs: Dict[str, Any] = { "redcap": redcapTransformConfig, "overview": overviewTransformConfig, "recruitment": recruitmentTransformConfig, diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index d492d30d..fcfbdf36 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -11,7 +11,7 @@ class ModuleTransform(object): def __init__( self, - config: Dict[str, Dict[str, Any]], + config: Dict[str, Any], logging_config: Dict[str, str] = {}, ) -> None: # @@ -19,30 +19,26 @@ def __init__( # # Logging Config Checks - self.logging_config = {} - self.logging_config["encoding"] = ( - logging_config["encoding"] if "encoding" in 
logging_config else "utf-8" - ) - self.logging_config["filename"] = ( - logging_config["filename"] - if "filename" in logging_config - else "REDCapETL.log" - ) - self.logging_config["level"] = ( - getattr(logging, logging_config["level"].upper) - if "level" in logging_config - else logging.DEBUG + self.logging_config = ( + config["logging_config"] + if "logging_config" in config + else { + "encoding": "utf-8", + "filename": "REDCapETL.log", + "level": logging.DEBUG, + } ) # Configure Logging - logging.basicConfig(**self.logging_config) + logging.basicConfig(**self.logging_config); self.logger = logging.getLogger("VizModTransform") # # References # - self.valid = True + self.valid: bool = True + self.transformed: Any # # Visualization Variables @@ -53,14 +49,14 @@ def __init__( self.key = config["key"] if "key" in config else None - self.transforms = config["transforms"] if "transforms" in config else None + self.transforms: List[Dict[str, Any]] = config["transforms"] - if self.transforms is None: + if len(self.transforms) < 1: self.valid = False raise ValueError( f"ModuleTransform instantiation missing transforms argument" ) - elif (type(self.transforms) != list) and (type(self.transforms) != dict): + elif (type(self.transforms) != list): self.valid = False raise ValueError( f"ModuleTransform argument transforms must be a list or dict type" @@ -70,11 +66,8 @@ def __init__( pass # Normalize Transforms to List Type, Check Validity, and Warn on Missing Attributes - self.transformList = ( - self.transforms if type(self.transforms) == list else [self.transforms] - ) - for transform in enumerate(self.transformList): - self.valid = True if self._transformIsValid(transform) else False + for indexed_transform in enumerate(self.transforms): + self.valid = True if self._transformIsValid(indexed_transform) else False if self.strict and not self.valid: raise ValueError( f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for 
details" @@ -84,11 +77,11 @@ def __init__( return - def _transformIsValid(self, transform: Tuple[int, Dict[str, Any]]) -> bool: + def _transformIsValid(self, indexed_transform: Tuple[int, Dict[str, Any]]) -> bool: """ Transform validator """ - index, transform = transform + index, transform = indexed_transform valid = True if "name" not in transform: self.logger.error( @@ -118,7 +111,7 @@ def _setValueType( name: str, record: Dict[str, Any], key: str, - accessors: Dict[str, Dict[str, str | Callable]], + accessors: Dict[str, Dict[str, Any]], ) -> Any: """ Element-wise type setting method. If value of @@ -126,7 +119,6 @@ def _setValueType( value as the type defined for property in the vtype. """ - print(accessors, "\n") accessor = accessors[key] for pname, _ptype in vtype.props: if pname == key: @@ -141,7 +133,7 @@ def _setValueType( f"Accessor `{pname}` with type `{ptype}` conflicts with VType definition requiring {_ptype}" ) # Accessor Name - pvalue = record[accessor["field"]] + pvalue: Any = record[accessor["field"]] if "remap" in accessor and accessor["remap"] is not None: pvalue = accessor["remap"]( { @@ -181,8 +173,8 @@ def simpleTransform(self, df: pd.DataFrame) -> object: One transform for one VType. """ self.transformed = [] - transform = ( - self.transformList.pop() + transform: Dict[str, Any] = ( + self.transforms.pop() ) # simple transforms have only one transform object name, vtype, methods, accessors = ( transform["name"], @@ -232,7 +224,7 @@ def compoundTransform(self, df: pd.DataFrame) -> object: """ self.transformed = [] - for transform in self.transformList: + for transform in self.transforms: name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), @@ -284,8 +276,7 @@ def mixedTransform(self, df: pd.DataFrame) -> object: Transforms can be heterogenous VTypes. 
""" self.transformed = {} - - for transform in self.transformList: + for transform in self.transforms: name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 3c9e3cf6..0a97a1be 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -30,9 +30,9 @@ def __init__(self, config: dict) -> None: self.reports_configs = config["reports"] if "reports" in config else [] # Report Merging - self.merge_transformed_reports = ( - config["merge_transformed_reports"] - if "merge_transformed_reports" in config + self.post_transform_merge = ( + config["post_transform_merge"] + if "post_transform_merge" in config else [] ) @@ -77,23 +77,18 @@ def __init__(self, config: dict) -> None: # # Regex Complex Field Parsers - self._field_rgx = {} - self._field_rgx["radio"] = re.compile(r"^[0-9\.]{1,17}") - self._field_rgx["checkbox"] = re.compile(r"^[0-9\.]{1,17}") - self._field_rgx["dropdown"] = re.compile(r"^[0-9\.]{1,17}") - self._field_rgx["yesno"] = re.compile(r"^[0-9\.]{1,17}") - self._field_rgx["text"] = re.compile( - r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}" - ) - self._field_rgx["descriptive"] = re.compile( - r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}" - ) - self._field_rgx["notes"] = re.compile( - r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}" - ) - self._field_rgx["file"] = None - self._field_rgx["signature"] = None - self._field_rgx["calc"] = None + self._field_rgx = { + "radio": re.compile(r"^[0-9\.]{1,17}"), + "checkbox": re.compile(r"^[0-9\.]{1,17}"), + "dropdown": re.compile(r"^[0-9\.]{1,17}"), + "yesno": re.compile(r"^[0-9\.]{1,17}"), + "text": re.compile(r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}"), + "descriptive": re.compile(r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}"), + "notes": re.compile(r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}"), + "file": 
re.compile(r".*"), + "signature": re.compile(r".*"), + "calc": re.compile(r".*"), + } # General Parsing Variables self.none_values = [ @@ -132,39 +127,41 @@ def __init__(self, config: dict) -> None: # Internal Defaults # - Key Assumptions for Transform Functions - # – Only Update if REDCap API Updates - self._reports_kwdargs = {} - self._reports_kwdargs["raw_or_label"] = "raw" - self._reports_kwdargs["raw_or_label_headers"] = "raw" - self._reports_kwdargs["export_checkbox_labels"] = False - self._reports_kwdargs["csv_delimiter"] = "\t" - + # – Only Update if REDCap API and/or PyCap Update + self._reports_kwdargs = { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t" + } # Get & Structure Report self.logger.info(f"Retrieving REDCap reports") self.reports = {} - for report_name, report_kwdargs, transforms in self.reports_configs: + for report_config in self.reports_configs: # Get Report - report_kwdargs = report_kwdargs | self._reports_kwdargs + report_key = report_config["key"] + report_kwdargs = report_config["kwdargs"] | self._reports_kwdargs + report_transforms = report_config["transforms"] report = self.project.export_report(**report_kwdargs) # Structure Reports - self.reports[report_name] = { + self.reports[report_key] = { "id": report_kwdargs["report_id"], "report": report, "df": pd.DataFrame(report), - "transforms": transforms, + "transforms": report_transforms, "transformed": None, "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), } # Generate Transformed Report self.logger.info(f"Applying REDCap report transforms") - for report_name, report_object in self.reports.items(): - self._apply_report_transforms(report_name) + for report_key, report_object in self.reports.items(): + self._apply_report_transforms(report_key) # Merge Reports self.logger.info(f"Merging REDCap reports") - receiving_report_name, merge_steps = self.merge_transformed_reports - self.merged = 
self._merge_reports(receiving_report_name, merge_steps) + receiving_report_key, merge_steps = self.post_transform_merge + self.merged = self._merge_reports(receiving_report_key, merge_steps) # Apply Post-Merge Transforms self.logger.info(f"Applying REDCap report post-merge transforms") @@ -181,60 +178,60 @@ def __init__(self, config: dict) -> None: # Getters # - def get_report_id(self, report_name: str) -> str: + def get_report_id(self, report_key: str) -> str: """ Returns a str instance of the REDCap report ID. """ - return self.reports[report_name]["id"] + return self.reports[report_key]["id"] def get_report_pycap( - self, report_name: str + self, report_key: str ) -> Union[List[Dict[str, Any]], str, pd.DataFrame]: """ Returns a PyCap Report object containing the report. """ - return self.reports[report_name]["report"] + return self.reports[report_key]["report"] - def get_report_df(self, report_name: str) -> pd.DataFrame: + def get_report_df(self, report_key: str) -> pd.DataFrame: """ Returns a pd.DataFrame instance containing the report. """ - return self.reports[report_name]["df"] + return self.reports[report_key]["df"] - def get_report_transformed_df(self, report_name: str) -> pd.DataFrame: + def get_report_transformed_df(self, report_key: str) -> pd.DataFrame: """ Returns a pd.DataFrame instance containing the report with normalization transforms applied. 
""" - return self.reports[report_name]["transformed"] + return self.reports[report_key]["transformed"] def get_report_transforms( - self, report_name: str + self, report_key: str ) -> List[Tuple[str, Dict[str, Any]]]: """ Returns a list of transforms that will be applied to the report """ - return self.reports[report_name]["transforms"] + return self.reports[report_key]["transforms"] - def get_report_annotations(self, report_name: str) -> List[Dict[str, Any]]: + def get_report_annotations(self, report_key: str) -> List[Dict[str, Any]]: """ Returns a list of annotations generated from the REDCap metadata API call. """ - return self.reports[report_name]["annotations"] + return self.reports[report_key]["annotations"] # # Transform Applicator # # Applies Declared Transforms to Reports - def _apply_report_transforms(self, report_name: str) -> None: + def _apply_report_transforms(self, report_key: str) -> None: """ Interal method that applies the transforms to each report as an idempotent transform stack. """ - report = self.reports[report_name] + report = self.reports[report_key] annotation = report["annotation"] report["transformed"] = report["df"] for transform in report["transforms"]: @@ -244,7 +241,7 @@ def _apply_report_transforms(self, report_name: str) -> None: report["transformed"], transform_name, transform_kwdargs ) - return self + return def apply_transform( self, @@ -272,7 +269,6 @@ def _drop_columns( df = df.drop(columns=columns) return df - @classmethod def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: """ Drop columns from pd.DataFrame. 
@@ -289,13 +285,12 @@ def _keep_columns( columns: List[str] = [], annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - columns = set(df.columns) - set( + columns = list(set(df.columns) - set( self._resolve_columns_with_dataframe(df=df, columns=columns) - ) + )) df = df.drop(columns=columns) return df - @classmethod def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: """ Keep only selected columns in pd.DataFrame. @@ -316,11 +311,10 @@ def _append_column_suffix( ) -> pd.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns) df[columns] = df[columns].rename( - mapper=lambda name: f"{namer}{separator}{suffix}" + mapper=lambda name: f"{name}{separator}{suffix}" ) return df - @classmethod def append_column_suffix( self, df: pd.DataFrame, @@ -336,8 +330,8 @@ def append_column_suffix( of column names by one or more characters, e.g. "_" for snakecase. """ - return self._prepend_column_suffix( - df=df, columns=transform_columns, suffix=suffix, separator=separator + return self._append_column_suffix( + df=df, columns=columns, suffix=suffix, separator=separator ) # @@ -358,7 +352,6 @@ def _prepend_column_prefix( ) return df - @classmethod def prepend_column_prefix( self, df: pd.DataFrame, @@ -392,6 +385,7 @@ def _remap_values_by_columns( # Resolve Mappable Fields and Available Value Maps columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + mappable_fields: List[Dict[str, Any]] if len(value_map) > 0: mappable_fields = [ {"name": column, "options": value_map} for column in columns @@ -400,7 +394,7 @@ def _remap_values_by_columns( mappable_fields = [ field for field in annotation - if field["options"] is not None and field["name"] in columns + if len(field["options"]) > 0 and field["name"] in columns ] for mappable_field in mappable_fields: @@ -421,7 +415,6 @@ def _remap_values_by_columns( return df - @classmethod def remap_values_by_columns( self, df: pd.DataFrame, @@ -478,7 +471,6 @@ def 
_map_missing_values_by_columns( return df - @classmethod def map_missing_values_by_columns( self, df: pd.DataFrame, columns: List[str], missing_value: Any ) -> pd.DataFrame: @@ -509,7 +501,6 @@ def _drop_rows( df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] return df - @classmethod def drop_rows( self, df: pd.DataFrame, @@ -525,14 +516,16 @@ def drop_rows( # Transforms - Aggregation # + # ... + # # Transforms - Aggregate Repeat Instruments by Index # - def _aggregate_repeat_instrument_column_by_index( + def _aggregate_repeat_instrument_by_index( self, df: pd.DataFrame, - aggregator: Callable = "max", + aggregator: str = "max", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: @@ -551,7 +544,6 @@ def _aggregate_repeat_instrument_column_by_index( df[column] = df[column].astype(dtype) return df - @classmethod def aggregate_repeat_instrument_by_index( self, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float ) -> pd.DataFrame: @@ -561,28 +553,72 @@ def aggregate_repeat_instrument_by_index( using an aggregation function applied to the repeat_instance field. 
""" - return self._aggregate_repreat_instrument_by_index( + return self._aggregate_repeat_instrument_by_index( df=df, aggregator=aggregator, dtype=dtype ) + # + # Generate New Columns + # + + def _new_column_from_binary_columns_positive_class( + self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, annotation: List[Dict[str, Any]] = [] + ) -> pd.DataFrame: + new_column_name = new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) + df[new_column_name] = df[list(column_name_map.keys())].idxmax(axis=1).map(column_name_map) + return df + + def new_column_from_binary_columns_positive_class( + self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float + ) -> pd.DataFrame: + """ + Pre-processing REDCap repeat_instrument so each instrument + has its own column and the value. The value is computed + using an aggregation function applied to the repeat_instance + field. + """ + return self._new_column_from_binary_columns_positive_class( + df=df, column_name_map=column_name_map, new_column_name = new_column_name, dtype=dtype + ) + + def _new_column_from_binary_columns_negative_class( + self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float + ) -> pd.DataFrame: + new_column_name = new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) + df[new_column_name] = df[list(column_name_map.keys())].idxmin(axis=1) + return df + + def new_column_from_binary_columns_negative_class( + self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float + ) -> pd.DataFrame: + """ + Pre-processing REDCap repeat_instrument so each instrument + has its own column and the value. The value is computed + using an aggregation function applied to the repeat_instance + field. 
+ """ + return self._new_column_from_binary_columns_negative_class( + df=df, column_name_map=column_name_map, new_column_name=new_column_name, dtype=dtype + ) + # # Report Merging # def _merge_reports( self, - receiving_report_name: str, + receiving_report_key: str, merge_steps: List[Tuple[str, Dict[str, Any]]], ) -> pd.DataFrame: """ - Performns N - 1 merge transforms on N reports. + Performs N - 1 merge transforms on N reports. """ - df_receiving_report = self.reports[receiving_report_name]["transformed"] + df_receiving_report = self.reports[receiving_report_key]["transformed"] if len(merge_steps) > 0: - for providing_report_name, merge_kwdargs in merge_steps: - df_providing_report = self.reports[providing_report_name]["transformed"] + for providing_report_key, merge_kwdargs in merge_steps: + df_providing_report = self.reports[providing_report_key]["transformed"] df_receiving_report = df_receiving_report.merge( df_providing_report, **merge_kwdargs ) @@ -638,9 +674,9 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: # REDCap Internal Variable Metadata metadata = [ - {"name": "redcap_data_access_group", "type": "text", "options": None}, - {"name": "redcap_repeat_instrument", "type": "text", "options": None}, - {"name": "redcap_repeat_instance", "type": "number", "options": None}, + {"name": "redcap_data_access_group", "type": "text", "options": {}}, + {"name": "redcap_repeat_instrument", "type": "text", "options": {}}, + {"name": "redcap_repeat_instance", "type": "number", "options": {}}, ] field_types = set(field["field_type"] for field in self.metadata) @@ -654,7 +690,7 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: for field in sorted(self.metadata, key=lambda f: f["field_name"]): if field["field_name"] in columns: field_type = field["field_type"] - options = {} + options: dict = {} if field_type in complex_types: rgx = self._field_rgx[field_type] for option in 
field["select_choices_or_calculations"].split("|"): @@ -662,9 +698,9 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: option.split(",")[0], (",".join(option.split(",")[1:])).strip(), ) - k = int(k) if re.match(rgx, k) else str(k) - v = int(v) if re.match(rgx, v) else str(v) - options[str(k)] = v + _k = int(k) if re.match(rgx, k) else str(k) + _v = int(v) if re.match(rgx, v) else str(v) + options[str(_k)] = _v metadata.append( { "name": field["field_name"], @@ -685,7 +721,7 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: { "name": field["field_name"], "type": field["field_type"], - "options": None, + "options": {}, } ) elif field_type in skip_types: @@ -693,7 +729,7 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: { "name": field["field_name"], "type": field["field_type"], - "options": None, + "options": {}, } ) else: @@ -709,8 +745,8 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: def export_raw( self, path: str = "", separator: str = "\t", filetype: str = ".tsv" ) -> object: - for report_name, report_object in self.reports.items(): - filename = f"{report_name}_raw{filetype}" + for report_key, report_object in self.reports.items(): + filename = f"{report_key}_raw{filetype}" filepath = os.path.join(path, filename) transformed = report_object["df"] transformed.to_csv( @@ -725,8 +761,8 @@ def export_raw( def export_transformed( self, path: str = "", separator: str = "\t", filetype: str = ".tsv" ) -> object: - for report_name, report_object in self.reports.items(): - filename = f"{report_name}_transformed{filetype}" + for report_key, report_object in self.reports.items(): + filename = f"{report_key}_transformed{filetype}" filepath = os.path.join(path, filename) transformed = report_object["transformed"] transformed.to_csv( diff --git a/modules/etl/vtypes/__init__.py b/modules/etl/vtypes/__init__.py index c1d0804b..2b3e52d7 100644 --- 
a/modules/etl/vtypes/__init__.py +++ b/modules/etl/vtypes/__init__.py @@ -1,4 +1,4 @@ -from .vtype import VType +from .vtype import SimpleVType, ComplexVType from .categorical import SingleCategorical, DoubleCategorical from .discrete import SingleDiscrete, DoubleDiscrete from .continuous import SingleContinuous, DoubleContinuous diff --git a/modules/etl/vtypes/categorical.py b/modules/etl/vtypes/categorical.py index f414fa3d..0d980baf 100644 --- a/modules/etl/vtypes/categorical.py +++ b/modules/etl/vtypes/categorical.py @@ -1,7 +1,7 @@ -from .vtype import VType +from .vtype import SimpleVType -class SingleCategorical(VType): +class SingleCategorical(SimpleVType): def __init__(self) -> None: super(SingleCategorical, self).__init__( "SingleCategorical", @@ -14,8 +14,7 @@ def __init__(self) -> None: str, ) - -class DoubleCategorical(VType): +class DoubleCategorical(SimpleVType): def __init__(self) -> None: super(DoubleCategorical, self).__init__( "DoubleCategorical", @@ -29,7 +28,6 @@ def __init__(self) -> None: str, ) - if __name__ == "__main__": pass else: diff --git a/modules/etl/vtypes/compound.py b/modules/etl/vtypes/compound.py index 0e69b227..0d681232 100644 --- a/modules/etl/vtypes/compound.py +++ b/modules/etl/vtypes/compound.py @@ -1,4 +1,4 @@ -from .vtype import VType +from .vtype import ComplexVType from .categorical import SingleCategorical, DoubleCategorical from .discrete import SingleDiscrete, DoubleDiscrete from .continuous import SingleContinuous, DoubleContinuous @@ -10,10 +10,8 @@ from typing import Tuple, List, Dict, Callable, Any import pandas as pd - -class Compound(VType): +class Compound(ComplexVType): def __init__(self) -> None: - raise NotImplementedError super(Compound, self).__init__( "Compound", [ @@ -26,26 +24,12 @@ def __init__(self) -> None: SingleTimeseries, DoubleDiscreteTimeseries, DoubleContinuousTimeseries, + Compound, ], str, ) - def isvalid( - self, dfs: pd.DataFrame, accessors: List[Dict[str, Dict[str, str]]] - ) -> bool: - 
""" - Extends the VType.isvalid method to operate on a list - of pd.DataFrames and accessors. - """ - valid = True - accessorsList = [accessors] - for accessors in accessorsList: - if not super(Compound, self).isvalid(df, accessors): - self.validation_errors.append( - f"VType {self.name.title()} has invalid accessors. See additional details above." - ) - valid = False - else: - continue - - return valid +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/vtypes/continuous.py b/modules/etl/vtypes/continuous.py index 1e1dfdbe..a89b3f11 100644 --- a/modules/etl/vtypes/continuous.py +++ b/modules/etl/vtypes/continuous.py @@ -1,7 +1,6 @@ -from .vtype import VType +from .vtype import SimpleVType - -class SingleContinuous(VType): +class SingleContinuous(SimpleVType): def __init__(self) -> None: super(SingleContinuous, self).__init__( "SingleContinuous", @@ -9,8 +8,7 @@ def __init__(self) -> None: float, ) - -class DoubleContinuous(VType): +class DoubleContinuous(SimpleVType): def __init__(self) -> None: super(DoubleContinuous, self).__init__( "DoubleContinuous", @@ -24,7 +22,6 @@ def __init__(self) -> None: float, ) - if __name__ == "__main__": pass else: diff --git a/modules/etl/vtypes/discrete.py b/modules/etl/vtypes/discrete.py index d768dffa..ec32407f 100644 --- a/modules/etl/vtypes/discrete.py +++ b/modules/etl/vtypes/discrete.py @@ -1,7 +1,7 @@ -from .vtype import VType +from .vtype import SimpleVType -class SingleDiscrete(VType): +class SingleDiscrete(SimpleVType): def __init__(self) -> None: super(SingleDiscrete, self).__init__( "SingleDiscrete", @@ -10,7 +10,7 @@ def __init__(self) -> None: ) -class DoubleDiscrete(VType): +class DoubleDiscrete(SimpleVType): def __init__(self) -> None: super(DoubleDiscrete, self).__init__( "Discrete", @@ -23,3 +23,9 @@ def __init__(self) -> None: ], int, ) + + +if __name__ == "__main__": + pass +else: + pass diff --git a/modules/etl/vtypes/mixed.py b/modules/etl/vtypes/mixed.py index e0e5969e..ca2bfad1 100644 
--- a/modules/etl/vtypes/mixed.py +++ b/modules/etl/vtypes/mixed.py @@ -1,5 +1,5 @@ from typing import Any, Callable, Union, List, Dict, Tuple -from .vtype import VType +from .vtype import ComplexVType from .categorical import SingleCategorical, DoubleCategorical from .discrete import SingleDiscrete, DoubleDiscrete from .continuous import SingleContinuous, DoubleContinuous @@ -12,7 +12,7 @@ import pandas as pd -class Mixed(VType): +class Mixed(ComplexVType): def __init__(self) -> None: raise NotImplementedError super(Mixed, self).__init__( @@ -32,24 +32,6 @@ def __init__(self) -> None: str, ) - def isvalid( - self, dfs: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] - ) -> bool: - """ - Extends the VType.isvalid method to operate on a list - of pd.DataFrames and accessors. - """ - valid = True - for accessors in accessorsList: - if not super(Compound, self).isvalid(df, accessors): - self.validation_errors.append( - f"VType {self.name.title()} has invalid accessors. See additional details above." 
- ) - valid = False - else: - continue - - if __name__ == "__main__": pass else: diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index ce587c95..4a2e169e 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -1,9 +1,9 @@ -from .vtype import VType +from .vtype import SimpleVType import pandas as pd from datetime import datetime -class SingleTimeseries(VType): +class SingleTimeseries(SimpleVType): def __init__(self) -> None: super(SingleTimeseries, self).__init__( "SingleTimeseries", @@ -17,7 +17,7 @@ def __init__(self) -> None: ) -class DoubleDiscreteTimeseries(VType): +class DoubleDiscreteTimeseries(SimpleVType): def __init__(self) -> None: super(DoubleDiscreteTimeseries, self).__init__( "DoubleDiscreteTimeseries", @@ -32,7 +32,7 @@ def __init__(self) -> None: ) -class DoubleContinuousTimeseries(VType): +class DoubleContinuousTimeseries(SimpleVType): def __init__(self) -> None: super(DoubleContinuousTimeseries, self).__init__( "DoubleContinuousTimeseries", diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index bf419886..382a6806 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -3,7 +3,7 @@ import pandas as pd -class VType(object): +class SimpleVType(object): def __init__( self, name: str, @@ -38,6 +38,62 @@ def isvalid(self, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]]) -> boo return False return True +class ComplexVType(object): + def __init__( + self, + name: str, + props: List[Any], + missing_value: Callable, + ) -> None: + self.name = name + self.props = props + self.missing_value = missing_value + # References + self.validation_errors: List[str] = [] + + def __str__(self): + return f"{self.__dict__}" + + # def isvalid( + # self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + # ) -> bool: + # """ + # Extends the VType.isvalid method to operate on a list + # of pd.DataFrames and accessors. 
+ # """ + # valid = True + # for accessors in accessorsList: + # if not super(Compound, self).isvalid(df, accessors): + # self.validation_errors.append( + # f"VType {self.name.title()} has invalid accessors. See additional details above." + # ) + # valid = False + # else: + # continue + # return valid + + def isvalid(self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]]) -> bool: + valid = True + columns = df.columns + for accessors in accessorsList: + for pname, ptype in self.props: + if pname in accessors.keys(): + column = accessors[pname]["field"] + if column not in columns: + self.validation_errors.append( + f"VType {self.name.title()} pd.DataFrame argument (df) is missing column defined in accessors argument, {column}" + ) + valid = False + else: + continue + else: + self.validation_errors.append( + f"VType {self.name.title()} accessors argument is missing required property, {pname}" + ) + valid = False + return valid + + if __name__ == "__main__": pass diff --git a/sql/init.sql b/sql/init.sql index 2c54b874..b547dbec 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -956,6 +956,7 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_dashboard" ( "study_id" CHAR(36) NOT NULL, "project_id" BIGINT NOT NULL, "dashboard_id" CHAR(36) NOT NULL, + "reports" UNKNOWN NOT NULL, "dashboard_name" VARCHAR NOT NULL, "dashboard_modules" UNKNOWN NOT NULL, "created_at" BIGINT NOT NULL, @@ -967,8 +968,8 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_dashboard" ( -- Dumping data for table public.study_redcap_project_dashboard: 1 rows /*!40000 ALTER TABLE "study_redcap_project_dashboard" DISABLE KEYS */; -INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO 
"study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "reports", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', '{}', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_redcap_project_dashboard" ENABLE KEYS */; diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 92ab23a8..fc6b554b 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -1071,6 +1071,7 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_dashboard" ( "study_id" CHAR(36) NOT NULL, "project_id" BIGINT NOT NULL, "dashboard_id" CHAR(36) NOT NULL, + "reports" UNKNOWN NOT NULL, "dashboard_name" VARCHAR NOT NULL, "dashboard_modules" VARCHAR[] NOT NULL, "created_at" BIGINT NOT NULL, @@ -1082,8 +1083,8 @@ CREATE TABLE IF NOT EXISTS "study_redcap_project_dashboard" ( -- Dumping data for table public.study_redcap_project_dashboard: 1 rows /*!40000 ALTER TABLE "study_redcap_project_dashboard" DISABLE KEYS */; -INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "reports", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', '{}', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_redcap_project_dashboard" ENABLE KEYS */; diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index 2940e14e..45f4738e 100644 --- a/sql/specific_tables.sql +++ b/sql/specific_tables.sql @@ -70,12 +70,12 @@ 
INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title ('00000000-0000-0000-0000-000000000004', '44444', 'data-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), ('00000000-0000-0000-0000-000000000005', '55555', 'more-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'); -INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', '11111', '10000000-0000-0000-0000-000000000000', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000002', '22222', '20000000-0000-0000-0000-000000000000', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000003', '33333', '30000000-0000-0000-0000-000000000000', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000004', '44444', '40000000-0000-0000-0000-000000000000', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000005', '55555', '50000000-0000-0000-0000-000000000000', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "reports", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', '11111', '10000000-0000-0000-0000-000000000000', '{}', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000002', '22222', '20000000-0000-0000-0000-000000000000', '{}', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '33333', 
'30000000-0000-0000-0000-000000000000', '{}', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '44444', '40000000-0000-0000-0000-000000000000', '{}', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000005', '55555', '50000000-0000-0000-0000-000000000000', '{}', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; From d007c7603f2e4f2f2425483a62ce59befb657d8e Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 5 Dec 2023 02:45:48 +0000 Subject: [PATCH 376/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/config/aireadi_config.py | 64 ++++++++++++++++------ modules/etl/transforms/module_transform.py | 10 ++-- modules/etl/transforms/redcap_transform.py | 64 ++++++++++++++++------ modules/etl/vtypes/categorical.py | 2 + modules/etl/vtypes/compound.py | 2 + modules/etl/vtypes/continuous.py | 3 + modules/etl/vtypes/mixed.py | 1 + modules/etl/vtypes/vtype.py | 6 +- 8 files changed, 113 insertions(+), 39 deletions(-) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index ee7b05e2..3485b12a 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -105,7 +105,7 @@ repeat_survey_data_columns: List = [ "current_medications_complete", - "current_medications" + "current_medications", ] # @@ -128,7 +128,7 @@ "cmtrt_a1c": "Oral Medication", "cmtrt_glcs": "Non-Insulin Injectable", "cmtrt_insln": "Insuling Injectable", - "cmtrt_lfst": "Lifestyle Management" + "cmtrt_lfst": "Lifestyle Management", } # @@ -137,7 +137,7 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. 
-redcapTransformConfig: Dict[str, List[Any]|Tuple[str, List[Any]]|str|List] = { +redcapTransformConfig: Dict[str, List[Any] | Tuple[str, List[Any]] | str | List] = { "reports": [ { "key": "participant-value", @@ -145,16 +145,34 @@ "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), ("map_missing_values_by_columns", {"columns": data_columns}), - ("new_column_from_binary_columns_positive_class", {"column_name_map": phenotype_column_map, "new_column_name": "phenotype"}), - ("new_column_from_binary_columns_positive_class", {"column_name_map": treatments_column_map, "new_column_name": "treatments"}), - ("keep_columns", {"columns": index_columns + data_columns + computed_columns}), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": phenotype_column_map, + "new_column_name": "phenotype", + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": treatments_column_map, + "new_column_name": "treatments", + }, + ), + ( + "keep_columns", + {"columns": index_columns + data_columns + computed_columns}, + ), ], }, { "key": "instrument-status", "kwdargs": {}, "transforms": [ - ("remap_values_by_columns", {"columns": survey_columns, "value_map": survey_instrument_map}), + ( + "remap_values_by_columns", + {"columns": survey_columns, "value_map": survey_instrument_map}, + ), ("map_missing_values_by_columns", {"columns": survey_columns}), ("keep_columns", {"columns": index_columns + survey_columns}), ], @@ -164,13 +182,20 @@ "kwdargs": {}, "transforms": [ ("drop_rows", {"columns": repeat_survey_columns}), - ("aggregate_repeat_instrument_by_index", {"aggregator": np.max, "dtype": str}), - ("keep_columns", {"columns": index_columns + repeat_survey_data_columns}), + ( + "aggregate_repeat_instrument_by_index", + {"aggregator": np.max, "dtype": str}, + ), + ( + "keep_columns", + {"columns": index_columns + repeat_survey_data_columns}, + ), ], }, ], "post_transform_merge": ( - "participant-value", [ + 
"participant-value", + [ # ("participant-value", {"on": index_columns, "how": "inner"}), ("instrument-status", {"on": index_columns, "how": "inner"}), ("repeat-instrument", {"on": index_columns, "how": "outer"}), @@ -178,7 +203,10 @@ ], ), "post_merge_transforms": [ - ("remap_values_by_columns",{"columns": repeat_survey_columns, "value_map": survey_instrument_map}), + ( + "remap_values_by_columns", + {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + ), ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), ], "index_columns": ["record_id"], @@ -1790,7 +1818,11 @@ "field": "scrcmpdat", "missing_value": missing_value_generic, "astype": int, - "remap": lambda x: datetime.strptime(x["record"][x["accessors"]["x"]["field"]], "%Y-%m-%d").isocalendar().week + "remap": lambda x: datetime.strptime( + x["record"][x["accessors"]["x"]["field"]], "%Y-%m-%d" + ) + .isocalendar() + .week, }, "y": { "name": "Cumulative Count (N)", @@ -1800,7 +1832,7 @@ }, }, }, - ] + ], }, ) @@ -1854,7 +1886,7 @@ }, }, }, - ] + ], }, ) @@ -1907,7 +1939,7 @@ }, }, }, - ] + ], }, ) @@ -2142,7 +2174,7 @@ }, }, } - ] + ], }, ) diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index fcfbdf36..e127f896 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -30,7 +30,7 @@ def __init__( ) # Configure Logging - logging.basicConfig(**self.logging_config); + logging.basicConfig(**self.logging_config) self.logger = logging.getLogger("VizModTransform") # @@ -56,7 +56,7 @@ def __init__( raise ValueError( f"ModuleTransform instantiation missing transforms argument" ) - elif (type(self.transforms) != list): + elif type(self.transforms) != list: self.valid = False raise ValueError( f"ModuleTransform argument transforms must be a list or dict type" @@ -173,9 +173,9 @@ def simpleTransform(self, df: pd.DataFrame) -> object: One transform for one VType. 
""" self.transformed = [] - transform: Dict[str, Any] = ( - self.transforms.pop() - ) # simple transforms have only one transform object + transform: Dict[ + str, Any + ] = self.transforms.pop() # simple transforms have only one transform object name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 0a97a1be..90a5d04d 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -31,9 +31,7 @@ def __init__(self, config: dict) -> None: # Report Merging self.post_transform_merge = ( - config["post_transform_merge"] - if "post_transform_merge" in config - else [] + config["post_transform_merge"] if "post_transform_merge" in config else [] ) # Post Merge Transforms @@ -132,7 +130,7 @@ def __init__(self, config: dict) -> None: "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t" + "csv_delimiter": "\t", } # Get & Structure Report self.logger.info(f"Retrieving REDCap reports") @@ -285,9 +283,10 @@ def _keep_columns( columns: List[str] = [], annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - columns = list(set(df.columns) - set( - self._resolve_columns_with_dataframe(df=df, columns=columns) - )) + columns = list( + set(df.columns) + - set(self._resolve_columns_with_dataframe(df=df, columns=columns)) + ) df = df.drop(columns=columns) return df @@ -562,14 +561,29 @@ def aggregate_repeat_instrument_by_index( # def _new_column_from_binary_columns_positive_class( - self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, annotation: List[Dict[str, Any]] = [] + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + dtype: Callable = float, + annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - new_column_name = new_column_name if 
len(new_column_name) > 0 else "_".join(column_name_map.keys()) - df[new_column_name] = df[list(column_name_map.keys())].idxmax(axis=1).map(column_name_map) + new_column_name = ( + new_column_name + if len(new_column_name) > 0 + else "_".join(column_name_map.keys()) + ) + df[new_column_name] = ( + df[list(column_name_map.keys())].idxmax(axis=1).map(column_name_map) + ) return df def new_column_from_binary_columns_positive_class( - self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + dtype: Callable = float, ) -> pd.DataFrame: """ Pre-processing REDCap repeat_instrument so each instrument @@ -578,18 +592,33 @@ def new_column_from_binary_columns_positive_class( field. """ return self._new_column_from_binary_columns_positive_class( - df=df, column_name_map=column_name_map, new_column_name = new_column_name, dtype=dtype + df=df, + column_name_map=column_name_map, + new_column_name=new_column_name, + dtype=dtype, ) def _new_column_from_binary_columns_negative_class( - self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + dtype: Callable = float, ) -> pd.DataFrame: - new_column_name = new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) + new_column_name = ( + new_column_name + if len(new_column_name) > 0 + else "_".join(column_name_map.keys()) + ) df[new_column_name] = df[list(column_name_map.keys())].idxmin(axis=1) return df def new_column_from_binary_columns_negative_class( - self, df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + dtype: Callable = float, ) -> pd.DataFrame: """ Pre-processing REDCap repeat_instrument so each instrument @@ -598,7 +627,10 @@ def 
new_column_from_binary_columns_negative_class( field. """ return self._new_column_from_binary_columns_negative_class( - df=df, column_name_map=column_name_map, new_column_name=new_column_name, dtype=dtype + df=df, + column_name_map=column_name_map, + new_column_name=new_column_name, + dtype=dtype, ) # diff --git a/modules/etl/vtypes/categorical.py b/modules/etl/vtypes/categorical.py index 0d980baf..3415bcc4 100644 --- a/modules/etl/vtypes/categorical.py +++ b/modules/etl/vtypes/categorical.py @@ -14,6 +14,7 @@ def __init__(self) -> None: str, ) + class DoubleCategorical(SimpleVType): def __init__(self) -> None: super(DoubleCategorical, self).__init__( @@ -28,6 +29,7 @@ def __init__(self) -> None: str, ) + if __name__ == "__main__": pass else: diff --git a/modules/etl/vtypes/compound.py b/modules/etl/vtypes/compound.py index 0d681232..82ce4366 100644 --- a/modules/etl/vtypes/compound.py +++ b/modules/etl/vtypes/compound.py @@ -10,6 +10,7 @@ from typing import Tuple, List, Dict, Callable, Any import pandas as pd + class Compound(ComplexVType): def __init__(self) -> None: super(Compound, self).__init__( @@ -29,6 +30,7 @@ def __init__(self) -> None: str, ) + if __name__ == "__main__": pass else: diff --git a/modules/etl/vtypes/continuous.py b/modules/etl/vtypes/continuous.py index a89b3f11..607745ae 100644 --- a/modules/etl/vtypes/continuous.py +++ b/modules/etl/vtypes/continuous.py @@ -1,5 +1,6 @@ from .vtype import SimpleVType + class SingleContinuous(SimpleVType): def __init__(self) -> None: super(SingleContinuous, self).__init__( @@ -8,6 +9,7 @@ def __init__(self) -> None: float, ) + class DoubleContinuous(SimpleVType): def __init__(self) -> None: super(DoubleContinuous, self).__init__( @@ -22,6 +24,7 @@ def __init__(self) -> None: float, ) + if __name__ == "__main__": pass else: diff --git a/modules/etl/vtypes/mixed.py b/modules/etl/vtypes/mixed.py index ca2bfad1..fe24f477 100644 --- a/modules/etl/vtypes/mixed.py +++ b/modules/etl/vtypes/mixed.py @@ -32,6 +32,7 @@ 
def __init__(self) -> None: str, ) + if __name__ == "__main__": pass else: diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index 382a6806..b565f829 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -38,6 +38,7 @@ def isvalid(self, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]]) -> boo return False return True + class ComplexVType(object): def __init__( self, @@ -72,7 +73,9 @@ def __str__(self): # continue # return valid - def isvalid(self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]]) -> bool: + def isvalid( + self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + ) -> bool: valid = True columns = df.columns for accessors in accessorsList: @@ -94,7 +97,6 @@ def isvalid(self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str] return valid - if __name__ == "__main__": pass else: From 2f7bad0037d84884d8e17bd6f47d916a149ae911 Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Mon, 4 Dec 2023 19:21:48 -0800 Subject: [PATCH 377/505] =?UTF-8?q?feat:=20=E2=9C=A8=20pytest=20multiple?= =?UTF-8?q?=20user=20permissioned=20clients=20+=20bug=20fixes=20(#30)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: create multiple accounts for permissions * style: 🎨 fix code style issues with Black * refactor: update error messages for access denied on POST/PUT endpoints * refactor: invite accounts to study after creating study * fix: check user permissions for Sponsors and Other endpoints * refactor: update permissions error message on POST/PUT endpoints * feat: sign up invited users for testing * refactor: change access denied message for publishing dataset * wip: create multiple clients for testing session that persists across modules * style: 🎨 fix code style issues with Black * feat: create multiple clients to simulate multiple users interacting with server * feat: all tests have 
access to all available clients during testing * feat: test admin, editor, viewer permissions for study endpoints * feat: admin, editor, viewer tests added to study arm, alt id metadata * feat: admin, editor, viewer test to update dataset access metadata * feat: admin, editor, viewer test to delete dataset alt id metadata * feat: admin, editor, viewer test to GET dataset consent metadata * feat: admin, editor, viewer test to PUT dataset consent metadata * feat: admin, editor, viewer test to PUT dataset contributor metadata * feat: admin, editor, viewer test to DELETE dataset contributor metadata * feat: admin, editor, viewer test to GET dataset creator metadata * style: 🎨 fix code style issues with Black * admin, editor, viewer permission check created for POST arm metadata * admin, editor, viewer permission check created for GET arm metadata * admin, editor, viewer permission check created for DELETE arm metadata * admin, editor, viewer permission check created for POST avail ipd metadata * admin, editor, viewer permission check created for GET avail ipd metadata * admin, editor, viewer permission check created for DELETE avail ipd metadata * admin, editor, viewer permission check created for POST central contact metadata * admin, editor, viewer permission check created for DELETE central contact metadata * admin, editor, viewer permission check created for GET collaborators metadata * admin, editor, viewer permission check created for PUT collaborators metadata * admin, editor, viewer permission check created for GET conditions metadata * admin, editor, viewer permission check created for PUT conditions metadata * admin, editor, viewer permission check created for GET description metadata * admin, editor, viewer permission check created for PUT description metadata * admin, editor, viewer permission check created for GET design metadata * admin, editor, viewer permission check created for PUT design metadata * admin, editor, viewer permission check created for 
GET eligibility metadata * admin, editor, viewer permission check created for PUT eligibility metadata * admin, editor, viewer permission check created for GET identification metadata * admin, editor, viewer permission check created for POST identification metadata * admin, editor, viewer permission check created for DELETE identification metadata * admin, editor, viewer permission check created for GET intervention metadata * admin, editor, viewer permission check created for POST intervention metadata * admin, editor, viewer permission check created for GET link metadata * admin, editor, viewer permission check created for GET location metadata * style: 🎨 fix code style issues with Black * admin, editor, viewer test for PUT ipdsharing metadata * admin, editor, viewer test for POST link metadata * admin, editor, viewer test for DELETE link metadata * admin, editor, viewer test for POST location metadata * admin, editor, viewer test for DELETE location metadata * admin, editor, viewer test for PUT other metadata * admin, editor, viewer test for GET overall official metadata * admin, editor, viewer test for POST overall official metadata * fix: check permissions for deleting dataset alt identifier * fix: update on list index for admin/editor data responses * fix: correct available ipd vars for testing * fix: corrected bug issues throughout study metadata tests * feat: admin, editor, viewer test for PUT oversight metadata * feat: admin, editor, viewer test for GET reference metadata * feat: admin, editor, viewer test for POST reference metadata * fix: add comma between POST request parameters * fix: add comma between DELETE request parameters * feat: admin, editor, viewer test for GET sponsors metadata * feat: admin, editor, viewer test for PUT sponsors metadata * feat: admin, editor, viewer test for GET status metadata * feat: admin, editor, viewer test for PUT status metadata * fix: correct payload for PUT sponsors endpoint * style: 🎨 fix code style issues with 
Black * admin, editor, viewer test for PUT sponsors metadata * admin, editor, viewer test for GET all datasets * admin, editor, viewer test for POST dataset * admin, editor, viewer test for GET specific dataset * admin, editor, viewer test for POST creator dataset metadata * admin, editor, viewer test for DELETE creator dataset metadata * admin, editor, viewer test for POST date dataset metadata * admin, editor, viewer test for DELETE date dataset metadata * admin, editor, viewer test for GET de-identification dataset metadata * admin, editor, viewer test for PUT de-identification dataset metadata * admin, editor, viewer test for GET description dataset metadata * admin, editor, viewer test for POST description dataset metadata * admin, editor, viewer test for DELETE dataset metadata * admin, editor, viewer test for GET funder dataset metadata * admin, editor, viewer test for POST funder dataset metadata * refactor: ensure status code is 204 for contributors * refactor: ensure status code is 204 for dataset * refactor: import Response and ensure 404 response in study api * refactor: import Response and ensure 404 response in dataset alt id api * refactor: import Response and ensure 404 response in dataset contrib api * refactor: import Response and ensure 404 response in dataset date api * refactor: import Response and ensure 404 response in dataset descrip api * refactor: import Response and ensure 404 response in dataset funder api * refactor: import Response and ensure 404 response in dataset related irms api * refactor: import Response and ensure 404 response in dataset rights irms api * refactor: import Response and ensure 404 response in dataset subject irms api * refactor: import Response and ensure 404 response in study avail ipd api * refactor: import Response and ensure 404 response in study identification api * refactor: import Response and ensure 404 response in study intervention api * refactor: import Response and ensure 204 response in study link api 
* refactor: import Response and ensure 204 response in study location api * refactor: import Response and ensure 204 response in study overall official api * fix: add import * fix: typo for investigator name test * refactor: ensure 204 status is return when deleting * fix: return Response(status) * fix: don't return content upon deleting central contact (api) * fix: dataset contributor status code fix * style: 🎨 fix code style issues with Black * :bug: fix: corrected issues with deleting responses status code * :sparkles: feat: verify data in GET response for study api * :recycle: refactor: GET test after POST for dataset api and differentiate dataset contents for all clients * :sparkles: verify all clients in GET datasets api * :sparkles: feat: verify content for GET specific dataset endpoint * :sparkles: feat: admin, editor, viewer test for DELETE dataset api * :sparkles: feat: verify GET dataset access after PUT for all clients * :sparkles: feat: verify GET response data for all clients in dataset alt identifier endpoint * :sparkles: verify GET dataset consent metadata after POST for all clients * :sparkles: verify GET dataset creator metadata after POST for all clients * :sparkles: verify GET dataset data metadata after POST for all clients * :sparkles: verify GET dataset de-id lvl metadata after POST for all clients * :sparkles: verify GET dataset description metadata after POST for all clients * :sparkles: verify GET dataset funder metadata after POST for all clients * :sparkles: feat: PUT tests for other dataset metadata created for all clients, GET response added after * :sparkles: verify GET dataset other metadata after POST for all clients * :sparkles: verify GET dataset publisher metadata after POST for all clients * :sparkles: feat: PUT dataset publisher metadata added for all clients * :sparkles: verify GET dataset record keys metadata after POST for all clients * :sparkles: feat: POST tests created for all clients in dataset related items api * 
:sparkles: verify GET dataset related items metadata after POST for all clients * :sparkles: feat: DELETE dataset contributor metadata tests created for all clients * :sparkles: feat: DELETE dataset related-item identifier metadata tests created for all clients * :sparkles: feat: all DELETE dataset related-item metadata tests created for all clients * style: 🎨 fix code style issues with Black * :bug: fix: correcting bugs for global variables and assertment checks * :bug: fix: correct variable for response data in GET specific dataset * :recycle: refactor: create multiple dummy datasets for deleting with different clients * :bug: fix: fix list index when going through POST contributor responses * :bug: fix: assert correct length of dataset descriptions in GET test * :bug: fix: patched multiple tests for pytest * :sparkles: feat: GET tests for all clients created for record-keys metadata * :bug: fix: final patches to all tests * :recycle: refactor: move GET dataset metadata responses after POST/PUT * :bug: fix: fix merge issues * :sparkles: feat: arm metadata GET tests for all clients * :sparkles: feat: avail ipd metadata GET tests for all clients * :sparkles: feat: central contact metadata GET tests for all clients * :sparkles: feat: collaborators metadata GET tests for all clients * :sparkles: feat: description metadata GET tests for all clients * :sparkles: feat: description metadata GET tests for all clients * :sparkles: feat: design metadata GET tests for all clients * :sparkles: feat: eligibility metadata GET tests for all clients * :sparkles: feat: identification metadata GET tests for all clients * :bug: fix: correct assert responses * :sparkles: feat: intervention metadata GET tests for all clients * style: 🎨 fix code style issues with Black * :sparkles: feat: GET ipdsharing study metadata for all clients created after POST * :sparkles: feat: GET study links metadata for all clients created after POST * :sparkles: feat: GET study locations metadata for all 
clients created after POST * :sparkles: feat: GET other study metadata for all clients created after POST * :sparkles: feat: GET overall official study metadata for all clients created after POST * :sparkles: feat: GET oversight study metadata for all clients created after PUT * :sparkles: feat: GET reference study metadata for all clients created after POST * :sparkles: feat: GET sponsors study metadata for all clients created after PUT * :sparkles: feat: GET status study metadata for all clients created after PUT * :bug: fix: assertions on failed tests * :recycle: refactor: remove study/other metadata endpont (not used in frontend) * :sparkles: feat: POST dataset rights metadata created for all clients * :sparkles: feat: GET dataset rights metadata for all clients * :sparkles: feat: DELETE for dataset subjects metadata created for all clients * :sparkles: feat: POST for dataset subject metadata created for admin client * :sparkles: feat: POST for dataset title create for all clients * :sparkles: feat: GET dataset title metadata created for all clients * :sparkles: feat: DELETE dataset title metadata created for all clients * :construction: wip: correct issue with items having same created_at timestamp * style: 🎨 fix code style issues with Black * :sparkles: feat: viewer client test added for GET study link metadata * :sparkles: feat: assert POST responses for version testing before POSTing version * :sparkles: feat: GET versioned study tests created for all clients * :sparkles: feat: GET versioned dataset metadata tests created for all clients * :sparkles: feat: GET versioned dataset readme metadata tests created for all clients * :sparkles: feat: PUT dataset readme version test created for all clients * :sparkles: feat: PUT dataset changelog version test created for all clients * :sparkles: feat: GET dataset changelog version test created for all clients * :bug: fix: normalize return response with GET and PUT changelog version api * :sparkles: feat: DELETE 
original datasets created by admin and editor clients * :sparkles: feat: GET dataset version test created for all clients * style: 🎨 fix code style issues with Black * :bug: fix: fix permission check for deleting dataset version * :bug: fix: patch PUT dataset version to proper permission check * :bug: fix: remove jsonify from return response on PUT version endpoint * :sparkles: fix: add needed global variables needed for pytest * :sparkles: feat: add test POST dataset version to study version file * :sparkles: feat: add test GET all dataset versions to study version file * :sparkles: feat: add GET specific dataset version test for all clients * :sparkles: feat: add PUT dataset version test for all clients * :recycle: refactor: remove version tests from dataset file and move to version file (pytest) * :bug: fix: verify viewer status code for delete dataset descrip * :sparkles: feat: DELETE dataset funder metadata test added for all clients * :sparkles: feat: DELETE dataset rights metadata for all clients created * :bug: fix: add pause between client POST calls for to prevent duplicate time stamps * :sparkles: feat: DELETE study intervention metadata test created for all clients * :bug: fix: (wip) final fixes before PR * style: 🎨 fix code style issues with Black * :sparkles: feat: README.md instructions for creating pytest created for documentation * :recycle: refactor: remove jsonify from datasets.py * :recycle: refactor: update email address for pytest clients --------- Co-authored-by: Lint Action --- apis/authentication.py | 2 + apis/contributor.py | 6 +- apis/dataset.py | 28 +- .../dataset_alternate_identifier.py | 9 +- apis/dataset_metadata/dataset_contributor.py | 4 +- apis/dataset_metadata/dataset_date.py | 2 +- apis/dataset_metadata/dataset_description.py | 2 +- apis/dataset_metadata/dataset_funder.py | 2 +- apis/dataset_metadata/dataset_related_item.py | 10 +- apis/dataset_metadata/dataset_rights.py | 2 +- apis/dataset_metadata/dataset_subject.py | 2 +- 
apis/dataset_metadata/dataset_title.py | 2 +- apis/participant.py | 2 +- apis/study.py | 2 +- apis/study_metadata/study_arm.py | 5 +- apis/study_metadata/study_available_ipd.py | 5 +- apis/study_metadata/study_contact.py | 5 +- apis/study_metadata/study_description.py | 2 +- apis/study_metadata/study_design.py | 2 +- apis/study_metadata/study_eligibility.py | 2 +- apis/study_metadata/study_identification.py | 5 +- apis/study_metadata/study_intervention.py | 5 +- apis/study_metadata/study_ipdsharing.py | 2 +- apis/study_metadata/study_link.py | 5 +- apis/study_metadata/study_location.py | 5 +- apis/study_metadata/study_other.py | 60 +- apis/study_metadata/study_overall_official.py | 4 +- apis/study_metadata/study_reference.py | 5 +- .../study_sponsors_collaborators.py | 6 +- apis/study_metadata/study_status.py | 2 +- tests/README.md | 32 + tests/conftest.py | 220 +- tests/functional/test_server_launch.py | 4 +- tests/functional/test_study_api.py | 138 +- tests/functional/test_study_dataset_api.py | 313 +- .../test_study_dataset_metadata_api.py | 3423 +++++++++++++++-- tests/functional/test_study_metadata_api.py | 2651 +++++++++++-- tests/functional/test_study_version_api.py | 835 +++- tests/unit/test_study_models.py | 40 +- 39 files changed, 7000 insertions(+), 851 deletions(-) create mode 100644 tests/README.md diff --git a/apis/authentication.py b/apis/authentication.py index 3d9c9758..20c5e990 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -324,6 +324,7 @@ def is_granted(permission: str, study=None): "delete_dataset", "version", "publish_version", + "delete_version", "participant", "study_metadata", "dataset_metadata", @@ -342,6 +343,7 @@ def is_granted(permission: str, study=None): "delete_dataset", "version", "publish_version", + "delete_version", "participant", "study_metadata", "dataset_metadata", diff --git a/apis/contributor.py b/apis/contributor.py index b0062b1b..44b0bcc2 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ 
-42,7 +42,7 @@ def get(self, study_id: int): def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): - return "Access denied, you can not modify", 403 + return "Access denied, you can not modify study", 403 data: Union[dict, Any] = request.json email_address = data["email_address"] @@ -115,7 +115,7 @@ def put(self, study_id: int, user_id: int): return grantee.to_dict(), 200 @api.doc("contributor delete") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, user_id: str): study = model.Study.query.get(study_id) @@ -218,4 +218,4 @@ def put(self, study_id: int, user_id: int): existing_owner.permission = "admin" model.db.session.commit() - return 204 + return Response(status=204) diff --git a/apis/dataset.py b/apis/dataset.py index f828e482..e1b4a3e7 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,6 +1,6 @@ import typing -from flask import jsonify, request, Response +from flask import request, Response from flask_restx import Namespace, Resource, fields import model @@ -56,7 +56,7 @@ def get(self, study_id): def post(self, study_id): study = model.Study.query.get(study_id) if not is_granted("add_dataset", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not create a dataset", 403 data: typing.Union[typing.Any, dict] = request.json dataset_ = model.Dataset.from_data(study) model.db.session.add(dataset_) @@ -94,7 +94,7 @@ def put(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("update_dataset", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not modify dataset", 403 data: typing.Union[dict, typing.Any] = request.json data_obj = model.Dataset.query.get(dataset_id) @@ -104,13 +104,13 @@ def put(self, study_id: int, dataset_id: int): return data_obj.to_dict(), 200 - 
@api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") @api.doc("delete dataset") def delete(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("delete_dataset", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not delete dataset", 403 data_obj = model.Dataset.query.get(dataset_id) for version in data_obj.dataset_versions: @@ -131,7 +131,7 @@ def get( ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) if not is_granted("version", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not modify dataset", 403 dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict(), 200 @@ -142,22 +142,22 @@ def put( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) - if not is_granted("publish_dataset", study): - return "Access denied, you can not modify", 403 + if not is_granted("publish_version", study): + return "Access denied, you can not publish dataset", 403 data_version_obj = model.Version.query.get(version_id) data_version_obj.update(request.json) model.db.session.commit() - return jsonify(data_version_obj.to_dict()), 200 + return data_version_obj.to_dict(), 200 - @api.response(201, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") @api.doc("delete dataset version") def delete( self, study_id: int, dataset_id: int, version_id: int ): # pylint: disable= unused-argument study = model.Study.query.get(study_id) - if not is_granted("delete_dataset", study): - return "Access denied, you can not modify", 403 + if not is_granted("delete_version", study): + return "Access denied, you can not delete dataset", 403 version_obj = model.Version.query.get(version_id) model.db.session.delete(version_obj) model.db.session.commit() @@ -182,7 +182,7 @@ def 
get(self, study_id: int, dataset_id: int): def post(self, study_id: int, dataset_id: int): study = model.Study.query.get(study_id) if not is_granted("version", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not modify dataset", 403 data: typing.Union[typing.Any, dict] = request.json # data["participants"] = [ @@ -269,7 +269,7 @@ def put( version_ = model.Version.query.get(version_id) version_.changelog = data["changelog"] model.db.session.commit() - return version_.changelog, 200 + return {"changelog": version_.changelog}, 200 @api.route("/study//dataset//version//readme") diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index d2c6049e..2971e583 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -120,16 +120,23 @@ class DatasetAlternateIdentifierUpdate(Resource): """Dataset Alternate Identifier Update Resource""" @api.doc("delete identifier") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, study_id: int, dataset_id: int, identifier_id: int ): # pylint: disable= unused-argument """Delete dataset alternate identifier""" + study_obj = model.Study.query.get(study_id) dataset_identifier_ = model.DatasetAlternateIdentifier.query.get( identifier_id ) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not make any change in dataset metadata", + 403, + ) + model.db.session.delete(dataset_identifier_) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py index 6e8e457c..7fb9cd08 100644 --- a/apis/dataset_metadata/dataset_contributor.py +++ b/apis/dataset_metadata/dataset_contributor.py @@ -141,7 +141,7 @@ class DatasetContributorDelete(Resource): """Dataset Contributor Delete Resource""" 
@api.doc("delete contributor") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, @@ -278,7 +278,7 @@ def post(self, study_id: int, dataset_id: int): @api.route("/study//dataset//metadata/creator/") class DatasetCreatorDelete(Resource): @api.doc("delete creator") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 11af423e..57916514 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -95,7 +95,7 @@ class DatasetDateDeleteResource(Resource): """Dataset Date Delete Resource""" @api.doc("delete date") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, study_id: int, dataset_id: int, date_id: int diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 6e45d9be..68ffdd9f 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -110,7 +110,7 @@ class DatasetDescriptionUpdate(Resource): """Dataset Description Update Resource""" @api.doc("delete description") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 5001bd18..0b03e14f 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -104,7 +104,7 @@ class DatasetFunderUpdate(Resource): """Dataset Funder Update Resource""" @api.doc("delete funder") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, diff --git a/apis/dataset_metadata/dataset_related_item.py 
b/apis/dataset_metadata/dataset_related_item.py index 3f889378..2b66de23 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -310,7 +310,7 @@ class DatasetRelatedItemUpdate(Resource): """Dataset Related Item Update Resource""" @api.doc("delete related item") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, @@ -338,7 +338,7 @@ class RelatedItemContributorsDelete(Resource): """Dataset Related Item Contributors Delete Resource""" @api.doc("delete related item contributors") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, @@ -368,7 +368,7 @@ class RelatedItemTitlesDelete(Resource): """Dataset Related Item Titles Delete Resource""" @api.doc("delete related item title") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, @@ -400,7 +400,7 @@ class RelatedItemIdentifiersDelete(Resource): """Dataset Related Item Identifiers Delete Resource""" @api.doc("delete related item identifier") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, @@ -429,7 +429,7 @@ class RelatedItemCreatorDelete(Resource): """Dataset Related Item Creator Delete Resource""" @api.doc("delete related item creator") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 194cde6e..024d50a5 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -92,7 +92,7 @@ class DatasetRightsUpdate(Resource): """Dataset Rights Update Resource""" @api.doc("delete rights") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") 
def delete( self, diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 37fbed20..2de1a38a 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -100,7 +100,7 @@ class DatasetSubjectUpdate(Resource): """Dataset Subject Update Resource""" @api.doc("delete subject") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index 1b1ed313..e9ebba7f 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -101,7 +101,7 @@ class DatasetTitleDelete(Resource): """Dataset Title Update Resource""" @api.doc("delete title") - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete( self, diff --git a/apis/participant.py b/apis/participant.py index 71a34253..c30d3769 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -62,7 +62,7 @@ def put(self, study_id: int, participant_id: int): model.db.session.commit() return update_participant.to_dict() - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, participant_id: int): if is_granted("viewer", study_id): diff --git a/apis/study.py b/apis/study.py index 42cc0f4d..66c6d51d 100644 --- a/apis/study.py +++ b/apis/study.py @@ -137,7 +137,7 @@ def put(self, study_id: int): return update_study.to_dict(), 200 - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") @api.doc(description="Delete a study") def delete(self, study_id: int): diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 833a82c2..5ffd2bf6 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -76,7 +76,7 @@ def post(self, 
study_id): study: model.Study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) for i in data: @@ -95,6 +95,9 @@ def post(self, study_id): # todo delete @api.route("/study//metadata/arm/") + @api.doc("Delete Study Arms") + @api.response(204, "Success") + @api.response(400, "Validation Error") class StudyArmUpdate(Resource): """Study Arm Metadata""" diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 1ab4d82a..c7be0d58 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -87,7 +87,7 @@ def post(self, study_id: int): study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) @@ -110,6 +110,9 @@ def post(self, study_id: int): class StudyLocationUpdate(Resource): """Study Available Metadata""" + @api.doc("delete available-ipd") + @api.response(204, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, available_ipd_id: int): """Delete study available metadata""" study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 1f385953..45b6eeb3 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -100,7 +100,7 @@ def validate_is_valid_email(instance): study = model.Study.query.get(study_id) if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: 
typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) @@ -125,6 +125,9 @@ def validate_is_valid_email(instance): class StudyContactUpdate(Resource): """Study Contact Metadata""" + @api.doc("Delete Study contacts") + @api.response(204, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, central_contact_id: int): """Delete study contact metadata""" study = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index af6baff6..cd2be85e 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -57,7 +57,7 @@ def put(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 study_ = model.Study.query.get(study_id) study_.study_description.update(request.json) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 3794e982..15dd68d1 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -213,7 +213,7 @@ def put(self, study_id: int): study_ = model.Study.query.get(study_id) # Check user permissions if not is_granted("study_metadata", study_): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 study_.study_design.update(data) diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 536c7089..681318e8 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -88,7 +88,7 @@ def put(self, study_id: int): study_ = model.Study.query.get(study_id) # Check user permissions if not is_granted("study_metadata", study_): - return "Access denied, you can not delete study", 403 + return "Access denied, you can 
not modify study", 403 study_.study_eligibility.update(request.json) model.db.session.commit() diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index da7698f2..806d1041 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -79,7 +79,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: typing.Union[dict, typing.Any] = request.json identifiers = [i for i in study_obj.study_identification if not i.secondary] @@ -116,6 +116,9 @@ def post(self, study_id: int): class StudyIdentificationdUpdate(Resource): """Study Identification Metadata""" + @api.doc("Delete Study Identifications") + @api.response(204, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, identification_id: int): """Delete study identification metadata""" study = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index bc43bd51..32070070 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -97,7 +97,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 list_of_elements = [] data: typing.Union[dict, typing.Any] = request.json for i in data: @@ -116,6 +116,9 @@ def post(self, study_id: int): class StudyInterventionUpdate(Resource): """Study Intervention Metadata""" + @api.doc("Delete Study Interventions") + @api.response(204, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, intervention_id: int): """Delete study intervention 
metadata""" study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index c9948b6f..97cb9921 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -97,7 +97,7 @@ def put(self, study_id: int): study_ = model.Study.query.get(study_id) if not is_granted("study_metadata", study_): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 study_.study_ipdsharing.update(request.json) model.db.session.commit() return study_.study_ipdsharing.to_dict(), 200 diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index ad1b2a2c..e8b72b98 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -61,7 +61,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: typing.Union[dict, typing.Any] = request.json list_of_elements = [] for i in data: @@ -84,6 +84,9 @@ def post(self, study_id: int): class StudyLinkUpdate(Resource): """Study Link Metadata""" + @api.doc("Delete study links") + @api.response(204, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, link_id: int): """Delete study link metadata""" study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 27d1c2f1..9296f108 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -83,7 +83,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: 
typing.Union[dict, typing.Any] = request.json list_of_elements = [] for i in data: @@ -103,6 +103,9 @@ def post(self, study_id: int): class StudyLocationUpdate(Resource): """Study Location Metadata""" + @api.doc("delete study locations") + @api.response(204, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, location_id: int): """Delete study location metadata""" study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index bcf359fc..07d27b2b 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -22,62 +22,6 @@ ) -@api.route("/study//metadata/other") -class StudyOtherResource(Resource): - """Study Other Metadata""" - - @api.doc("other") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") - @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study other metadata""" - study_ = model.Study.query.get(study_id) - - study_other_ = study_.study_other - - return study_other_.to_dict(), 200 - - def put(self, study_id: int): - """Update study other metadata""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "oversight_has_dmc": {"type": "boolean"}, - "conditions": { - "type": "array", - "items": {"type": "string"}, - "minItems": 1, - "uniqueItems": True, - }, - "keywords": { - "type": "array", - "items": {"type": "string"}, - "minItems": 1, - "uniqueItems": True, - }, - "size": {"type": "integer"}, - }, - "required": ["oversight_has_dmc", "conditions", "keywords", "size"], - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - study_ = model.Study.query.get(study_id) - - study_.study_other.update(request.json) - - model.db.session.commit() - - return study_.study_other.to_dict(), 200 - - @api.route("/study//metadata/oversight") class 
StudyOversightResource(Resource): """Study Oversight Metadata""" @@ -110,7 +54,7 @@ def put(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: typing.Union[dict, typing.Any] = request.json study_oversight = study_obj.study_other.oversight_has_dmc = data[ "oversight_has_dmc" @@ -157,7 +101,7 @@ def put(self, study_id: int): data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 study_obj.study_other.conditions = data study_obj.touch() model.db.session.commit() diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index c0d50c6f..2a210730 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -80,7 +80,7 @@ def post(self, study_id: int): data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 list_of_elements = [] for i in data: if "id" in i and i["id"]: @@ -98,7 +98,7 @@ def post(self, study_id: int): @api.route("/study//metadata/overall-official/") class StudyOverallOfficialUpdate(Resource): - @api.response(200, "Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def delete(self, study_id: int, overall_official_id: int): """Delete study overall official metadata""" diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index d084b724..af179b16 100644 --- a/apis/study_metadata/study_reference.py +++ 
b/apis/study_metadata/study_reference.py @@ -67,7 +67,7 @@ def post(self, study_id: int): study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 data: typing.Union[dict, typing.Any] = request.json list_of_elements = [] for i in data: @@ -86,6 +86,9 @@ def post(self, study_id: int): class StudyReferenceUpdate(Resource): """Study Reference Metadata""" + @api.doc("delete reference") + @api.response(204, "Success") + @api.response(400, "Validation Error") def delete(self, study_id: int, reference_id: int): """Delete study reference metadata""" study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 8a040c4d..da1dba0b 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -117,6 +117,10 @@ def put(self, study_id: int): study_ = model.Study.query.get(study_id) + # Check user permissions + if not is_granted("study_metadata", study_): + return "Access denied, you can not modify study", 403 + study_.study_sponsors_collaborators.update(request.json) model.db.session.commit() @@ -158,7 +162,7 @@ def put(self, study_id: int): data: typing.Union[dict, typing.Any] = request.json study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 study_obj.study_sponsors_collaborators.collaborator_name = data study_obj.touch() model.db.session.commit() diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index cd4cef42..84cbe9d3 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -100,7 +100,7 @@ def put(self, study_id: int): study_obj = 
model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 + return "Access denied, you can not modify study", 403 study = model.Study.query.get(study_id) study.study_status.update(request.json) diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 00000000..7be5a31c --- /dev/null +++ b/tests/README.md @@ -0,0 +1,32 @@ +# Pytest for AI-READI API Testing + +Pytest is a testing framework that allows you to write tests for your code. It is used for testing the API of AI-READI. + +## Running Tests + +To run the tests, you can use the following commands: + +```bash +# Run all tests +poe test +# Run all tests with print statements +poe test_with_capture +``` + +## Writing Tests + +To write tests, you can follow the format of the tests in the `tests/functional` directory. The tests are written using pytest fixtures. You can read more about fixtures [here](https://docs.pytest.org/en/stable/fixture.html). + +Pytest Fixtures allow for functions to be used across testing files and modules. This is useful for setting up the testing clients needed throughout testing. For example, the `client` fixture in `tests/conftest.py` is used to create multiple clients for testing. The `client` fixture creates multiple users with different permissions after the POST request is made to the `/study` endpoint. This allows for the main user to invite contributors and admins to the study. + +Every time the tests are run with `poe test`, the database is cleared and the fixtures are run again. This ensures that the tests are run on a clean database every time. This is done by using the `@pytest.fixture(scope='session')` decorator. The `scope='session'` ensures that the fixture is only run once per session. The main user is first signed in with `_logged_in_user`; the `clients` fixture is then used once all users have been created and signed in.
+ +To create a fixture, use the `@pytest.fixture` decorator. This allows the fixture to be used in other testing files. The `@pytest.fixture` decorator can take in a `scope` argument. The `scope` argument can be used to specify how often the fixture is run. The `scope` argument can take in the following values: + +- `function`: The fixture is run once per test function. (default) +- `class`: The fixture is run once per test class. +- `module`: The fixture is run once per test module. +- `package`: The fixture is run once per test package. +- `session`: The fixture is run once per test session. + +Global variables are created in `conftest.py` to manage important information such as `study_id` and `dataset_id`. These variables are used throughout the testing files. The `conftest.py` file is used to store fixtures that are used across multiple testing files. The `conftest.py` file is automatically discovered by pytest. You can read more about `conftest.py` [here](https://docs.pytest.org/en/stable/fixture.html#conftest-py-sharing-fixture-functions).
\ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 56fa4d12..4f8be7aa 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ """Defines fixtures available to all tests.""" +import json import os import unittest.mock @@ -16,7 +17,7 @@ os.environ["FLASK_ENV"] = "testing" # Set global variable for study ID -# Study variables use for testing +# Study variables for main client pytest.global_study_id = {} pytest.global_arm_id = "" pytest.global_available_ipd_id = "" @@ -28,6 +29,65 @@ pytest.global_overall_official_id = "" pytest.global_reference_id = "" +# study variables for admin client and editor client +pytest.global_alternative_identifier_id_admin = "" +pytest.global_alternative_identifier_id_editor = "" +pytest.global_dataset_contributor_id_admin = "" +pytest.global_dataset_contributor_id_editor = "" +pytest.global_dataset_contributor_id_admin = "" +pytest.global_dataset_contributor_id_editor = "" +pytest.global_editor_arm_id_admin = "" +pytest.global_editor_arm_id_editor = "" +pytest.global_available_ipd_id_admin = "" +pytest.global_available_ipd_id_editor = "" +pytest.global_admin_cc_id_admin = "" +pytest.global_editor_cc_id_editor = "" +pytest.global_identification_id_admin = "" +pytest.global_identification_id_editor = "" +pytest.global_intervention_id_admin = "" +pytest.global_intervention_id_editor = "" +pytest.global_link_id_admin = "" +pytest.global_link_id_editor = "" +pytest.global_location_id_admin = "" +pytest.global_location_id_editor = "" +pytest.global_overall_official_id_admin = "" +pytest.global_overall_official_id_editor = "" +pytest.global_reference_id_admin = "" +pytest.global_reference_id_editor = "" + +# dataset variables for admin client and editor client +pytest.global_dataset_id_admin = "" +pytest.global_dataset_id_editor = "" +pytest.global_dataset_date_id_admin = "" +pytest.global_dataset_date_id_editor = "" +pytest.global_dataset_creator_id_admin = "" 
+pytest.global_dataset_creator_id_editor = "" +pytest.global_dataset_funder_id_admin = "" +pytest.global_dataset_funder_id_editor = "" +pytest.global_dataset_related_item_creator_id_admin = "" +pytest.global_dataset_related_item_creator_id_editor = "" +pytest.global_related_item_identifier_id_admin = "" +pytest.global_related_item_identifier_id_editor = "" +pytest.global_related_item_title_id_admin = "" +pytest.global_related_item_title_id_editor = "" +pytest.global_dataset_rights_id_admin = "" +pytest.global_dataset_rights_id_editor = "" +pytest.global_dataset_subject_id_admin = "" +pytest.global_dataset_subject_id_editor = "" +pytest.global_dataset_title_id_admin = "" +pytest.global_dataset_title_id_editor = "" + +pytest.global_dataset_related_item_identifier_id_admin = "" +pytest.global_dataset_related_item_title_id_admin = "" +pytest.global_dataset_related_item_title_id_editor = "" +pytest.global_dataset_related_item_contributor_id_admin = "" +pytest.global_dataset_related_item_id_admin = "" +pytest.global_dataset_related_item_id_editor = "" +pytest.global_dataset_related_item_contributor_id_editor = "" +pytest.global_dataset_description_id_admin = "" +pytest.global_dataset_description_id_editor = "" +pytest.global_dataset_related_item_identifier_id_editor = "" + # Dataset variables use for testing pytest.global_dataset_id = "" @@ -41,19 +101,25 @@ pytest.global_dataset_related_item_contributor_id = "" pytest.global_dataset_related_item_creator_id = "" pytest.global_dataset_related_item_identifier_id = "" -pytest.global_dataset_related_item_title_id = "" +pytest.global_dataset_related_item_main_title_id = "" +pytest.global_dataset_related_item_sub_title_id = "" pytest.global_dataset_rights_id = "" pytest.global_dataset_subject_id = "" pytest.global_dataset_title_id = "" pytest.global_dataset_version_id = "" +# User token codes +pytest.global_admin_token = "" +pytest.global_editor_token = "" +pytest.global_viewer_token = "" + # Create the flask app for testing 
@pytest.fixture(scope="session") def flask_app(): """An application for the tests.""" - yield create_app(config_module="pytest_config") + return create_app(config_module="pytest_config") # Create a test client for the app @@ -87,7 +153,7 @@ def _create_user(_test_client): json={ "email_address": "test@fairhub.io", "password": "Testingyeshello11!", - "code": "7654321", + "code": "", }, ) @@ -96,17 +162,157 @@ def _create_user(_test_client): # Fixture to sign in the user for module testing @pytest.fixture(scope="session") -def _logged_in_client(_test_client): +def _logged_in_client(flask_app): """Sign in the user for testing.""" + with flask_app.app_context(): + with flask_app.test_client() as _test_client: + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): + response = _test_client.post( + "/auth/login", + json={ + "email_address": "test@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + + assert response.status_code == 200 + response.close() + yield _test_client + + +@pytest.fixture(scope="session") +def _test_invite_study_contributor(_logged_in_client): + """Test invite study contributor.""" + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.post( + f"/study/{study_id}/contributor", + json={"email_address": "editor@fairhub.io", "role": "editor"}, + ) + + assert response.status_code == 201 + response_data = json.loads(response.data) + + pytest.global_editor_token = response_data["token"] + + response = _logged_in_client.post( + f"/study/{study_id}/contributor", + json={"email_address": "admin@fairhub.io", "role": "admin"}, + ) + + assert response.status_code == 201 + response_data = json.loads(response.data) + pytest.global_admin_token = response_data["token"] + + response = _logged_in_client.post( + f"/study/{study_id}/contributor", + json={"email_address": "viewer@fairhub.io", "role": "viewer"}, + ) + + assert response.status_code == 201 + response_data = json.loads(response.data) + 
pytest.global_viewer_token = response_data["token"] + + +@pytest.fixture(scope="session") +def _create_admin_user(flask_app): + """Create an admin user for testing.""" + with flask_app.test_client() as _test_client: + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): + response = _test_client.post( + "/auth/signup", + json={ + "email_address": "admin@fairhub.io", + "password": "Testingyeshello11!", + "code": pytest.global_admin_token, + }, + ) + + assert response.status_code == 201 + + +@pytest.fixture(scope="session") +def _create_editor_user(flask_app): + """Create an editor user for testing.""" + with flask_app.test_client() as _test_client: + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): + response = _test_client.post( + "/auth/signup", + json={ + "email_address": "editor@fairhub.io", + "password": "Testingyeshello11!", + "code": pytest.global_editor_token, + }, + ) + + assert response.status_code == 201 + + +@pytest.fixture(scope="session") +def _create_viewer_user(flask_app): + """Create a viewer user for testing.""" + with flask_app.test_client() as _test_client: + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): + response = _test_client.post( + "/auth/signup", + json={ + "email_address": "viewer@fairhub.io", + "password": "Testingyeshello11!", + "code": pytest.global_viewer_token, + }, + ) + + assert response.status_code == 201 + + +@pytest.fixture(scope="session") +def clients(flask_app): + """Signs in all clients needed for testing""" + ctx = flask_app.app_context() + ctx.push() + + _logged_in_client = flask_app.test_client() + _admin_client = flask_app.test_client() + _editor_client = flask_app.test_client() + _viewer_client = flask_app.test_client() + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): - response = _test_client.post( + response = _logged_in_client.post( "/auth/login", json={ "email_address": "test@fairhub.io", "password": "Testingyeshello11!", }, ) + assert 
response.status_code == 200 + response = _admin_client.post( + "/auth/login", + json={ + "email_address": "admin@fairhub.io", + "password": "Testingyeshello11!", + }, + ) assert response.status_code == 200 - yield _test_client + response = _editor_client.post( + "/auth/login", + json={ + "email_address": "editor@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + assert response.status_code == 200 + + response = _viewer_client.post( + "/auth/login", + json={ + "email_address": "viewer@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + assert response.status_code == 200 + + yield _logged_in_client, _admin_client, _editor_client, _viewer_client + + ctx.pop() diff --git a/tests/functional/test_server_launch.py b/tests/functional/test_server_launch.py index 48eca89b..7121b9d6 100644 --- a/tests/functional/test_server_launch.py +++ b/tests/functional/test_server_launch.py @@ -11,10 +11,8 @@ def test_server_launch(_test_client): # Create a test client using the Flask application configured for testing response = _test_client.get("/echo") - # Temporary test until we have authentication - # assert response.status_code == 403 - # Convert the response data from JSON to a Python dictionary + assert response.status_code == 200 response_data = json.loads(response.data) # Check the response is correct diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index 6cf6e37e..2b9ce2e6 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -27,12 +27,39 @@ def test_post_study(_logged_in_client): pytest.global_study_id = response_data -def test_get_all_studies(_logged_in_client): +def test_invite_study_contributor(_test_invite_study_contributor): + """Invite contributors to study.""" + print("Contributors invited to study") + + +def test_create_admin_user(_create_admin_user): + """Admin User created for permissions testing""" + print("Admin user created for testing") + + +def 
test_create_editor_user(_create_editor_user): + """Editor User created for permissions testing""" + print("Editor user created for testing") + + +def test_viewer_editor_user(_create_viewer_user): + """Viewer User created for permissions testing""" + print("Viewer user created for testing") + + +def test_signin_all_clients(clients): + """Signs in all clients for verifying permissions before testing continues.""" + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + print("All clients signed in for testing") + + +def test_get_all_studies(clients): """ GIVEN a Flask application configured for testing WHEN the '/study' endpoint is requested (GET) THEN check that the response is valid """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients response = _logged_in_client.get("/study") assert response.status_code == 200 @@ -40,13 +67,23 @@ def test_get_all_studies(_logged_in_client): assert len(response_data) == 1 # Only one study created + # Test responses for all clients and verify permissions + admin_response = _admin_client.get("/study") + editor_response = _editor_client.get("/study") + viewer_response = _viewer_client.get("/study") + + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 -def test_update_study(_logged_in_client): + +def test_update_study(clients): """ GIVEN a study ID WHEN the '/study' endpoint is requested (PUT) THEN check that the study is updated with the inputed data """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -65,32 +102,103 @@ def test_update_study(_logged_in_client): assert response_data["image"] == pytest.global_study_id["image"] # type: ignore assert response_data["id"] == pytest.global_study_id["id"] # type: ignore + admin_response = _admin_client.put( + f"/study/{study_id}", + json={ + 
"title": "Admin Study Title", + "image": pytest.global_study_id["image"], # type: ignore + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + pytest.global_study_id = admin_response_data + + assert admin_response_data["title"] == "Admin Study Title" + assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore + + editor_response = _editor_client.put( + f"/study/{study_id}", + json={ + "title": "Editor Study Title", + "image": pytest.global_study_id["image"], # type: ignore + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + pytest.global_study_id = editor_response_data + + assert editor_response_data["title"] == "Editor Study Title" + assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore + + viewer_response = _viewer_client.put( + f"/study/{study_id}", + json={ + "title": "Viewer Study Title", + "image": pytest.global_study_id["image"], # type: ignore + }, + ) + + # response will be 403 due to Viewer permissions + assert viewer_response.status_code == 403 + -def test_get_study_by_id(_logged_in_client): +def test_get_study_by_id(clients): """ GIVEN a study ID WHEN the '/study/{study_id}' endpoint is requested (GET) THEN check that the response is valid """ - response = _logged_in_client.get(f"/study/{pytest.global_study_id['id']}") # type: ignore # pylint: disable=line-too-long # noqa: E501 + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore - # Convert the response data from JSON to a Python dictionary + response = _logged_in_client.get(f"/study/{study_id}") + admin_response = _admin_client.get(f"/study/{study_id}") + editor_response = 
_editor_client.get(f"/study/{study_id}") + viewer_response = _viewer_client.get(f"/study/{study_id}") + + # Verify all clients have access to study + # Then convert the response data from JSON to a Python dictionary + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 assert response.status_code == 200 response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) # Check the response is correct assert response_data["id"] == pytest.global_study_id["id"] # type: ignore assert response_data["title"] == pytest.global_study_id["title"] # type: ignore assert response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore + assert admin_response_data["title"] == pytest.global_study_id["title"] # type: ignore + assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore + + assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore + assert editor_response_data["title"] == pytest.global_study_id["title"] # type: ignore + assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore + + assert viewer_response_data["id"] == pytest.global_study_id["id"] # type: ignore + assert viewer_response_data["title"] == pytest.global_study_id["title"] # type: ignore + assert viewer_response_data["image"] == pytest.global_study_id["image"] # type: ignore -def test_delete_studies_created(_logged_in_client): + +def test_delete_studies_created(clients): """ Given a Flask application configured for testing WHEN the '/study/{study_id}' endpoint is requested (DELETE) THEN check that the response is valid (200) """ - # create study first to then delete + _logged_in_client, _admin_client, _editor_client, 
_viewer_client = clients + + # Create a temporary study to delete as the original study + # Is needed to test all other endpoints response = _logged_in_client.post( "/study", json={ @@ -103,11 +211,17 @@ def test_delete_studies_created(_logged_in_client): response_data = json.loads(response.data) study_id = response_data["id"] - # delete study + admin_response = _admin_client.delete(f"/study/{study_id}") + editor_response = _editor_client.delete(f"/study/{study_id}") + viewer_response = _viewer_client.delete(f"/study/{study_id}") + + # Verify all clients have no access to newly created study + # They are only invited contributors to the original study created at the start of testing + assert admin_response.status_code == 403 + assert editor_response.status_code == 403 + assert viewer_response.status_code == 403 + + # delete temporary study response = _logged_in_client.delete(f"/study/{study_id}") assert response.status_code == 204 - - response_get = _logged_in_client.get("/study") - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 1 diff --git a/tests/functional/test_study_dataset_api.py b/tests/functional/test_study_dataset_api.py index c6985a52..c77d2562 100644 --- a/tests/functional/test_study_dataset_api.py +++ b/tests/functional/test_study_dataset_api.py @@ -4,27 +4,13 @@ import pytest -def test_get_all_dataset_from_study(_logged_in_client): - """ - GIVEN a Flask application configured for testing and a study ID - WHEN the '/dataset/{study_id}' endpoint is requested (GET) - THEN check that the response is valid and retrieves the dataset content - """ - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/dataset") - - assert response.status_code == 200 - # response_data = json.loads(response.data) - # print(response_data) - - -def test_post_dataset(_logged_in_client): +def test_post_dataset(clients): """ GIVEN a Flask application configured for testing and a study 
ID WHEN the '/dataset/{study_id}' endpoint is requested (POST) THEN check that the response is valid and creates a dataset """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( @@ -39,28 +25,209 @@ def test_post_dataset(_logged_in_client): response_data = json.loads(response.data) pytest.global_dataset_id = response_data["id"] + assert response_data["title"] == "Dataset Title" + assert response_data["description"] == "Dataset Description" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset", + json={ + "title": "Admin Dataset Title", + "description": "Admin Dataset Description", + }, + ) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_id_admin = admin_response_data["id"] + assert admin_response_data["title"] == "Admin Dataset Title" + assert admin_response_data["description"] == "Admin Dataset Description" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset", + json={ + "title": "Editor Dataset Title", + "description": "Editor Dataset Description", + }, + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_id_editor = editor_response_data["id"] + + assert editor_response_data["title"] == "Editor Dataset Title" + assert editor_response_data["description"] == "Editor Dataset Description" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset", + json={ + "title": "Viewer Dataset Title", + "description": "Viewer Dataset Description", + }, + ) + + # response will be 403 due to Viewer permissions + assert viewer_response.status_code == 403 + + +def test_get_all_dataset_from_study(clients): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/dataset/{study_id}' endpoint is requested (GET) + THEN check that the 
response is valid and retrieves the dataset content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore -def test_get_dataset_from_study(_logged_in_client): + response = _logged_in_client.get(f"/study/{study_id}/dataset") + admin_response = _admin_client.get(f"/study/{study_id}/dataset") + editor_response = _editor_client.get(f"/study/{study_id}/dataset") + viewer_response = _viewer_client.get(f"/study/{study_id}/dataset") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + # Three datasets should be returned + assert len(response_data) == 3 + assert len(admin_response_data) == 3 + assert len(editor_response_data) == 3 + assert len(viewer_response_data) == 3 + + assert response_data[0]["title"] == "Dataset Title" + assert response_data[0]["description"] == "Dataset Description" + assert response_data[1]["title"] == "Admin Dataset Title" + assert response_data[1]["description"] == "Admin Dataset Description" + assert response_data[2]["title"] == "Editor Dataset Title" + assert response_data[2]["description"] == "Editor Dataset Description" + + assert admin_response_data[0]["title"] == "Dataset Title" + assert admin_response_data[0]["description"] == "Dataset Description" + assert admin_response_data[1]["title"] == "Admin Dataset Title" + assert admin_response_data[1]["description"] == "Admin Dataset Description" + assert admin_response_data[2]["title"] == "Editor Dataset Title" + assert admin_response_data[2]["description"] == "Editor Dataset Description" + + assert editor_response_data[0]["title"] == "Dataset Title" + assert 
editor_response_data[0]["description"] == "Dataset Description" + assert editor_response_data[1]["title"] == "Admin Dataset Title" + assert editor_response_data[1]["description"] == "Admin Dataset Description" + assert editor_response_data[2]["title"] == "Editor Dataset Title" + assert editor_response_data[2]["description"] == "Editor Dataset Description" + + assert viewer_response_data[0]["title"] == "Dataset Title" + assert viewer_response_data[0]["description"] == "Dataset Description" + assert viewer_response_data[1]["title"] == "Admin Dataset Title" + assert viewer_response_data[1]["description"] == "Admin Dataset Description" + assert viewer_response_data[2]["title"] == "Editor Dataset Title" + assert viewer_response_data[2]["description"] == "Editor Dataset Description" + + +def test_get_dataset_from_study(clients): """ Given a Flask application configured for testing and a study ID When the '/dataset/{study_id}/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + admin_dataset_id = pytest.global_dataset_id_admin + editor_dataset_id = pytest.global_dataset_id_editor response = _logged_in_client.get(f"/study/{study_id}/dataset/{dataset_id}") + admin_response = _admin_client.get(f"/study/{study_id}/dataset/{dataset_id}") + editor_response = _editor_client.get(f"/study/{study_id}/dataset/{dataset_id}") + viewer_response = _viewer_client.get(f"/study/{study_id}/dataset/{dataset_id}") assert response.status_code == 200 - # response_data = json.loads(response.data) + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = 
json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["title"] == "Dataset Title" + assert response_data["description"] == "Dataset Description" + assert admin_response_data["title"] == "Dataset Title" + assert admin_response_data["description"] == "Dataset Description" + assert editor_response_data["title"] == "Dataset Title" + assert editor_response_data["description"] == "Dataset Description" + assert viewer_response_data["title"] == "Dataset Title" + assert viewer_response_data["description"] == "Dataset Description" + + response = _logged_in_client.get(f"/study/{study_id}/dataset/{admin_dataset_id}") + admin_response = _admin_client.get(f"/study/{study_id}/dataset/{admin_dataset_id}") + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{admin_dataset_id}" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{admin_dataset_id}" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 -def test_delete_dataset_from_study(_logged_in_client): + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["title"] == "Admin Dataset Title" + assert response_data["description"] == "Admin Dataset Description" + assert admin_response_data["title"] == "Admin Dataset Title" + assert admin_response_data["description"] == "Admin Dataset Description" + assert editor_response_data["title"] == "Admin Dataset Title" + assert editor_response_data["description"] == "Admin Dataset Description" + assert viewer_response_data["title"] == "Admin Dataset Title" + assert viewer_response_data["description"] == "Admin Dataset Description" + + response = 
_logged_in_client.get(f"/study/{study_id}/dataset/{editor_dataset_id}") + admin_response = _admin_client.get(f"/study/{study_id}/dataset/{editor_dataset_id}") + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{editor_dataset_id}" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{editor_dataset_id}" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["title"] == "Editor Dataset Title" + assert response_data["description"] == "Editor Dataset Description" + assert admin_response_data["title"] == "Editor Dataset Title" + assert admin_response_data["description"] == "Editor Dataset Description" + assert editor_response_data["title"] == "Editor Dataset Title" + assert editor_response_data["description"] == "Editor Dataset Description" + assert viewer_response_data["title"] == "Editor Dataset Title" + assert viewer_response_data["description"] == "Editor Dataset Description" + + +def test_delete_dataset_from_study(clients): """ Given a Flask application configured for testing and a study ID When the '/dataset/{study_id}/{dataset_id}' endpoint is requested (DELETE) Then check that the response is valid and deletes the dataset """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients # create a new dataset and delete it afterwards study_id = pytest.global_study_id["id"] # type: ignore @@ -71,93 +238,69 @@ def test_delete_dataset_from_study(_logged_in_client): "description": "Dataset Description", }, ) - - assert response.status_code == 201 - response_data = json.loads(response.data) - dataset_id = response_data["id"] - - # delete dataset - response 
= _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}", + admin_response = _admin_client.post( + f"/study/{study_id}/dataset", + json={ + "title": "Admin Delete Me", + "description": "Dataset Description", + }, ) - - assert response.status_code == 204 - response_get = _logged_in_client.get(f"/study/{study_id}/dataset") - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 1 - - -def test_post_dataset_version(_logged_in_client): - """ - Given a Flask application configured for testing, study ID and a dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/version' - endpoint is requested (POST) - Then check that the response is valid and creates a dataset version - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/version", + editor_response = _editor_client.post( + f"/study/{study_id}/dataset", json={ - "title": "Dataset Version 1.0", - "published": False, - "doi": "doi:test", - "changelog": "changelog testing here", + "title": "Editor Delete Me", + "description": "Dataset Description", }, ) assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_version_id = response_data["id"] - - assert response_data["title"] == "Dataset Version 1.0" - assert response_data["published"] is False - assert response_data["doi"] == "doi:test" - assert response_data["changelog"] == "changelog testing here" + assert admin_response.status_code == 201 + assert editor_response.status_code == 201 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + dataset_id = response_data["id"] + admin_dataset_id = admin_response_data["id"] + editor_dataset_id = editor_response_data["id"] -def test_get_all_dataset_versions(_logged_in_client): - """ - Given a Flask 
application configured for testing, study ID and a dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/version' endpoint is requested (GET) - Then check that the response is valid and retrieves all dataset versions - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/version", + # delete temporary datasets + viewer_response = _viewer_client.delete(f"/study/{study_id}/dataset/{dataset_id}") + delete_response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}", + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{admin_dataset_id}", + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{editor_dataset_id}", ) - assert response.status_code == 200 - - -def test_get_dataset_version(_logged_in_client): - """ - Given a Flask application configured for testing, study ID, dataset ID and version ID - When the '/study/{study_id}/dataset/{dataset_id}/version/{version_id}' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset version - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - version_id = pytest.global_dataset_version_id + assert viewer_response.status_code == 403 + assert delete_response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + # delete original datasets created by admin and editor as they won't be used in other tests + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{pytest.global_dataset_id_admin}", + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{pytest.global_dataset_id_editor}", ) - assert response.status_code == 200 + assert admin_response.status_code 
== 204 + assert editor_response.status_code == 204 -def test_put_dataset_version(_logged_in_client): +def test_put_dataset_version(clients): """ Given a Flask application configured for testing, study ID, dataset ID and version ID When the '/study/{study_id}/dataset/{dataset_id}/version/{version_id}' endpoint is requested (PUT) Then check that the response is valid and updates the dataset version """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients # study_id = pytest.global_study_id["id"] # dataset_id = pytest.global_dataset_id # version_id = pytest.global_dataset_version_id diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index e0a84199..55118abe 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -2,31 +2,18 @@ """Tests for the Dataset's Metadata API endpoints""" import json +from time import sleep import pytest # ------------------- ACCESS METADATA ------------------- # -def test_get_dataset_access_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset access metadata content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" - ) - assert response.status_code == 200 - - -def test_put_dataset_access_metadata(_logged_in_client): +def test_put_dataset_access_metadata(clients): """ Given a Flask application configured for testing and a study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (PUT) Then check that the response is valid and updates the dataset access metadata content """ + _logged_in_client, 
_admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -48,32 +35,119 @@ def test_put_dataset_access_metadata(_logged_in_client): assert response_data["url"] == "google.com" assert response_data["url_last_checked"] == 123 + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + json={ + "type": "admin type", + "description": "admin description", + "url": "google.com", + "url_last_checked": 123, + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) -# ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # -def test_get_alternative_identifier(_logged_in_client): + assert admin_response_data["type"] == "admin type" + assert admin_response_data["description"] == "admin description" + assert admin_response_data["url"] == "google.com" + assert admin_response_data["url_last_checked"] == 123 + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + json={ + "type": "editor type", + "description": "editor description", + "url": "google.com", + "url_last_checked": 123, + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["type"] == "editor type" + assert editor_response_data["description"] == "editor description" + assert editor_response_data["url"] == "google.com" + assert editor_response_data["url_last_checked"] == 123 + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + json={ + "type": "viewer type", + "description": "viewer description", + "url": "google.com", + "url_last_checked": 123, + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_access_metadata(clients): """ Given a Flask application configured for testing and a study ID - When the 
'/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset alternative identifier content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset access metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access" ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + # Since editor was the last successful PUT request, the response data should match + assert response_data["type"] == "editor type" + assert response_data["description"] == "editor description" + assert response_data["url"] == "google.com" + assert response_data["url_last_checked"] == 123 + + assert admin_response_data["type"] == "editor type" + assert admin_response_data["description"] == "editor description" + assert admin_response_data["url"] == "google.com" + assert admin_response_data["url_last_checked"] == 123 + + assert editor_response_data["type"] == "editor 
type" + assert editor_response_data["description"] == "editor description" + assert editor_response_data["url"] == "google.com" + assert editor_response_data["url_last_checked"] == 123 -def test_post_alternative_identifier(_logged_in_client): + assert viewer_response_data["type"] == "editor type" + assert viewer_response_data["description"] == "editor description" + assert viewer_response_data["url"] == "google.com" + assert viewer_response_data["url_last_checked"] == 123 + + +# ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # +def test_post_alternative_identifier(clients): """ Given a Flask application configured for testing and a study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' endpoint is requested (POST) Then check that the response is valid and creates the dataset alternative identifier """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -86,6 +160,8 @@ def test_post_alternative_identifier(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -94,54 +170,158 @@ def test_post_alternative_identifier(_logged_in_client): assert response_data[0]["identifier"] == "identifier test" assert response_data[0]["type"] == "ARK" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "admin test", + "type": "ARK", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "editor test", + "type": "ARK", + } + ], + ) + viewer_response = _viewer_client.post( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", + json=[ + { + "identifier": "viewer test", + "type": "ARK", + } + ], + ) + + assert admin_response.status_code == 201 + assert editor_response.status_code == 201 + assert viewer_response.status_code == 403 + + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + pytest.global_alternative_identifier_id_admin = admin_response_data[0]["id"] + pytest.global_alternative_identifier_id_editor = editor_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "admin test" + assert admin_response_data[0]["type"] == "ARK" + assert editor_response_data[0]["identifier"] == "editor test" + assert editor_response_data[0]["type"] == "ARK" + -def test_delete_alternative_identifier(_logged_in_client): +def test_get_alternative_identifier(clients): """ Given a Flask application configured for testing and a study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset alternative identifier content + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset alternative identifier content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - identifier_id = pytest.global_alternative_identifier_id - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" ) - - assert response.status_code == 204 - response_get = _logged_in_client.get( + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + editor_response = 
_editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + ) + viewer_response = _viewer_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 0 + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -# ------------------- CONSENT METADATA ------------------- # -def test_get_dataset_consent_metadata(_logged_in_client): + assert response_data[0]["identifier"] == "identifier test" + assert response_data[0]["type"] == "ARK" + assert response_data[1]["identifier"] == "admin test" + assert response_data[1]["type"] == "ARK" + assert response_data[2]["identifier"] == "editor test" + assert response_data[2]["type"] == "ARK" + + assert admin_response_data[0]["identifier"] == "identifier test" + assert admin_response_data[0]["type"] == "ARK" + assert admin_response_data[1]["identifier"] == "admin test" + assert admin_response_data[1]["type"] == "ARK" + assert admin_response_data[2]["identifier"] == "editor test" + assert admin_response_data[2]["type"] == "ARK" + + assert editor_response_data[0]["identifier"] == "identifier test" + assert editor_response_data[0]["type"] == "ARK" + assert editor_response_data[1]["identifier"] == "admin test" + assert editor_response_data[1]["type"] == "ARK" + assert editor_response_data[2]["identifier"] == "editor test" + assert editor_response_data[2]["type"] == "ARK" + + assert viewer_response_data[0]["identifier"] == "identifier test" + assert viewer_response_data[0]["type"] == "ARK" + assert viewer_response_data[1]["identifier"] == "admin test" + assert 
viewer_response_data[1]["type"] == "ARK" + assert viewer_response_data[2]["identifier"] == "editor test" + assert viewer_response_data[2]["type"] == "ARK" + + +def test_delete_alternative_identifier(clients): """ Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset consent metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset alternative identifier content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + identifier_id = pytest.global_alternative_identifier_id + admin_identifier_id = pytest.global_alternative_identifier_id_admin + editor_identifier_id = pytest.global_alternative_identifier_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + # verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + ) + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{admin_identifier_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{editor_identifier_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert 
editor_response.status_code == 204 -def test_put_dataset_consent_metadata(_logged_in_client): +# ------------------- CONSENT METADATA ------------------- # +def test_put_dataset_consent_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (PUT) Then check that the response is valid and updates the dataset consent metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -169,15 +349,136 @@ def test_put_dataset_consent_metadata(_logged_in_client): assert response_data["no_methods"] is True assert response_data["details"] == "test" + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + json={ + "type": "admin test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "admin details test", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data["type"] == "admin test" + assert admin_response_data["details"] == "admin details test" + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + json={ + "type": "editor test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "editor details test", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["type"] == "editor test" + assert editor_response_data["details"] == "editor details test" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + json={ + "type": "viewer test", + 
"noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "viewer details test", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_consent_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset consent metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + # Editor was the last successful PUT request, so the response data should match + assert response_data["type"] == "editor test" + assert response_data["noncommercial"] is True + assert response_data["geog_restrict"] is True + assert response_data["research_type"] is True + assert response_data["genetic_only"] is True + assert response_data["no_methods"] is True + assert response_data["details"] == "editor details test" + + assert admin_response_data["type"] == "editor test" + assert 
admin_response_data["noncommercial"] is True + assert admin_response_data["geog_restrict"] is True + assert admin_response_data["research_type"] is True + assert admin_response_data["genetic_only"] is True + assert admin_response_data["no_methods"] is True + assert admin_response_data["details"] == "editor details test" + + assert editor_response_data["type"] == "editor test" + assert editor_response_data["noncommercial"] is True + assert editor_response_data["geog_restrict"] is True + assert editor_response_data["research_type"] is True + assert editor_response_data["genetic_only"] is True + assert editor_response_data["no_methods"] is True + assert editor_response_data["details"] == "editor details test" + + assert viewer_response_data["type"] == "editor test" + assert viewer_response_data["noncommercial"] is True + assert viewer_response_data["geog_restrict"] is True + assert viewer_response_data["research_type"] is True + assert viewer_response_data["genetic_only"] is True + assert viewer_response_data["no_methods"] is True + assert viewer_response_data["details"] == "editor details test" + # ------------------- CONTRIBUTOR METADATA ------------------- # -def test_post_dataset_contributor_metadata(_logged_in_client): +def test_post_dataset_contributor_metadata(clients): """ Given a Flask application configured for testing and a study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' endpoint is requested (POST) Then check that the response is valid and creates the dataset contributor metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -202,6 +503,8 @@ def test_post_dataset_contributor_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -219,72 +522,163 @@ def 
test_post_dataset_contributor_metadata(_logged_in_client): assert response_data[0]["affiliations"][0]["scheme"] == "uh" assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", + json=[ + { + "name": "Admin Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_contributor_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["name"] == "Admin Name here" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", + json=[ + { + "name": "Editor Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_contributor_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["name"] == "Editor Name here" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", + json=[ + { + "name": "Viewer Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + 
"name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + + assert viewer_response.status_code == 403 + -def test_get_dataset_contributor_metadata(_logged_in_client): +def test_get_dataset_contributor_metadata(clients): """ Given a Flask application configured for testing and a study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset contributor metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" + ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 -def test_delete_dataset_contributor_metadata(_logged_in_client): +def test_delete_dataset_contributor_metadata(clients): """ Given a Flask application configured for testing and a study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' endpoint is requested (DELETE) Then check that the response is valid and deletes the dataset contributor metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id contributor_id = 
pytest.global_dataset_contributor_id + admin_contributor_id = pytest.global_dataset_contributor_id_admin + editor_contributor_id = pytest.global_dataset_contributor_id_editor + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{admin_contributor_id}" ) - assert response_get.status_code == 200 - - assert len(json.loads(response_get.data)) == 0 - - -# ------------------- CREATOR METADATA ------------------- # -def test_get_dataset_creator_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset creator metadata content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{editor_contributor_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_post_dataset_creator_metadata(_logged_in_client): +# ------------------- CREATOR METADATA ------------------- # +def test_post_dataset_creator_metadata(clients): """ Given 
a Flask application configured for testing and a study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (POST) Then check that the response is valid and creates the dataset creator metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -308,6 +702,8 @@ def test_post_dataset_creator_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -324,57 +720,313 @@ def test_post_dataset_creator_metadata(_logged_in_client): assert response_data[0]["affiliations"][0]["scheme"] == "uh" assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", + json=[ + { + "name": "admin Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_creator_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["name"] == "admin Name here" + assert admin_response_data[0]["name_type"] == "Personal" + assert admin_response_data[0]["name_identifier"] == "Name identifier" + assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_response_data[0]["creator"] is True + assert 
admin_response_data[0]["affiliations"][0]["name"] == "Test" + assert admin_response_data[0]["affiliations"][0]["identifier"] == "yes" + assert admin_response_data[0]["affiliations"][0]["scheme"] == "uh" + assert admin_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", + json=[ + { + "name": "Editor Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_creator_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["name"] == "Editor Name here" + assert editor_response_data[0]["name_type"] == "Personal" + assert editor_response_data[0]["name_identifier"] == "Name identifier" + assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_response_data[0]["creator"] is True + assert editor_response_data[0]["affiliations"][0]["name"] == "Test" + assert editor_response_data[0]["affiliations"][0]["identifier"] == "yes" + assert editor_response_data[0]["affiliations"][0]["scheme"] == "uh" + assert editor_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", + json=[ + { + "name": "Viewer Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": 
"yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + ) + + assert viewer_response.status_code == 403 + -def test_delete_dataset_creator_metadata(_logged_in_client): +def test_get_dataset_creator_metadata(clients): """ Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset creator metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset creator metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - creator_id = pytest.global_dataset_creator_id - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" + ) + viewer_response = _viewer_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 0 + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + assert 
len(response_data) == 3 + assert len(admin_response_data) == 3 + assert len(editor_response_data) == 3 + assert len(viewer_response_data) == 3 -# ------------------- DATE METADATA ------------------- # -def test_get_dataset_date_metadata(_logged_in_client): + assert response_data[0]["id"] == pytest.global_dataset_creator_id + assert response_data[0]["name"] == "Name here" + assert response_data[0]["name_type"] == "Personal" + assert response_data[0]["name_identifier"] == "Name identifier" + assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert response_data[0]["creator"] is True + assert response_data[0]["affiliations"][0]["name"] == "Test" + assert response_data[0]["affiliations"][0]["identifier"] == "yes" + assert response_data[0]["affiliations"][0]["scheme"] == "uh" + assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert response_data[1]["id"] == pytest.global_dataset_creator_id_admin + assert response_data[1]["name"] == "admin Name here" + assert response_data[1]["name_type"] == "Personal" + assert response_data[1]["name_identifier"] == "Name identifier" + assert response_data[1]["name_identifier_scheme"] == "Name Scheme ID" + assert response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert response_data[1]["creator"] is True + assert response_data[1]["affiliations"][0]["name"] == "Test" + assert response_data[1]["affiliations"][0]["identifier"] == "yes" + assert response_data[1]["affiliations"][0]["scheme"] == "uh" + assert response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert response_data[2]["id"] == pytest.global_dataset_creator_id_editor + assert response_data[2]["name"] == "Editor Name here" + assert response_data[2]["name_type"] == "Personal" + assert response_data[2]["name_identifier"] == "Name identifier" + assert response_data[2]["name_identifier_scheme"] == "Name Scheme ID" + 
assert response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert response_data[2]["creator"] is True + assert response_data[2]["affiliations"][0]["name"] == "Test" + assert response_data[2]["affiliations"][0]["identifier"] == "yes" + assert response_data[2]["affiliations"][0]["scheme"] == "uh" + assert response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_response_data[0]["name"] == "Name here" + assert admin_response_data[0]["name_type"] == "Personal" + assert admin_response_data[0]["name_identifier"] == "Name identifier" + assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_response_data[0]["creator"] is True + assert admin_response_data[0]["affiliations"][0]["name"] == "Test" + assert admin_response_data[0]["affiliations"][0]["identifier"] == "yes" + assert admin_response_data[0]["affiliations"][0]["scheme"] == "uh" + assert admin_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data[1]["name"] == "admin Name here" + assert admin_response_data[1]["name_type"] == "Personal" + assert admin_response_data[1]["name_identifier"] == "Name identifier" + assert admin_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_response_data[1]["creator"] is True + assert admin_response_data[1]["affiliations"][0]["name"] == "Test" + assert admin_response_data[1]["affiliations"][0]["identifier"] == "yes" + assert admin_response_data[1]["affiliations"][0]["scheme"] == "uh" + assert admin_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data[2]["name"] == "Editor Name here" + assert admin_response_data[2]["name_type"] == "Personal" + assert admin_response_data[2]["name_identifier"] == "Name identifier" + assert 
admin_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_response_data[2]["creator"] is True + assert admin_response_data[2]["affiliations"][0]["name"] == "Test" + assert admin_response_data[2]["affiliations"][0]["identifier"] == "yes" + assert admin_response_data[2]["affiliations"][0]["scheme"] == "uh" + assert admin_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_response_data[0]["name"] == "Name here" + assert editor_response_data[0]["name_type"] == "Personal" + assert editor_response_data[0]["name_identifier"] == "Name identifier" + assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_response_data[0]["creator"] is True + assert editor_response_data[0]["affiliations"][0]["name"] == "Test" + assert editor_response_data[0]["affiliations"][0]["identifier"] == "yes" + assert editor_response_data[0]["affiliations"][0]["scheme"] == "uh" + assert editor_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data[1]["name"] == "admin Name here" + assert editor_response_data[1]["name_type"] == "Personal" + assert editor_response_data[1]["name_identifier"] == "Name identifier" + assert editor_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_response_data[1]["creator"] is True + assert editor_response_data[1]["affiliations"][0]["name"] == "Test" + assert editor_response_data[1]["affiliations"][0]["identifier"] == "yes" + assert editor_response_data[1]["affiliations"][0]["scheme"] == "uh" + assert editor_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data[2]["name"] == "Editor Name here" + assert 
editor_response_data[2]["name_type"] == "Personal" + assert editor_response_data[2]["name_identifier"] == "Name identifier" + assert editor_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_response_data[2]["creator"] is True + assert editor_response_data[2]["affiliations"][0]["name"] == "Test" + assert editor_response_data[2]["affiliations"][0]["identifier"] == "yes" + assert editor_response_data[2]["affiliations"][0]["scheme"] == "uh" + assert editor_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert viewer_response_data[0]["name"] == "Name here" + assert viewer_response_data[0]["name_type"] == "Personal" + assert viewer_response_data[0]["name_identifier"] == "Name identifier" + assert viewer_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_response_data[0]["creator"] is True + assert viewer_response_data[0]["affiliations"][0]["name"] == "Test" + assert viewer_response_data[0]["affiliations"][0]["identifier"] == "yes" + assert viewer_response_data[0]["affiliations"][0]["scheme"] == "uh" + assert viewer_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert viewer_response_data[1]["name"] == "admin Name here" + assert viewer_response_data[1]["name_type"] == "Personal" + assert viewer_response_data[1]["name_identifier"] == "Name identifier" + assert viewer_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_response_data[1]["creator"] is True + assert viewer_response_data[1]["affiliations"][0]["name"] == "Test" + assert viewer_response_data[1]["affiliations"][0]["identifier"] == "yes" + assert viewer_response_data[1]["affiliations"][0]["scheme"] == "uh" + assert 
viewer_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert viewer_response_data[2]["name"] == "Editor Name here" + assert viewer_response_data[2]["name_type"] == "Personal" + assert viewer_response_data[2]["name_identifier"] == "Name identifier" + assert viewer_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_response_data[2]["creator"] is True + assert viewer_response_data[2]["affiliations"][0]["name"] == "Test" + assert viewer_response_data[2]["affiliations"][0]["identifier"] == "yes" + assert viewer_response_data[2]["affiliations"][0]["scheme"] == "uh" + assert viewer_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + +def test_delete_dataset_creator_metadata(clients): """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset date metadata content + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset creator metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + creator_id = pytest.global_dataset_creator_id + admin_creator_id = pytest.global_dataset_creator_id_admin + editor_creator_id = pytest.global_dataset_creator_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" + ) + response = _logged_in_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{admin_creator_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{editor_creator_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_post_dataset_date_metadata(_logged_in_client): +# ------------------- DATE METADATA ------------------- # +def test_post_dataset_date_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' endpoint is requested (POST) Then check that the response is valid and creates the dataset date metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -382,6 +1034,8 @@ def test_post_dataset_date_metadata(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/metadata/date", json=[{"date": 20210101, "type": "Type", "information": "Info"}], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -391,52 +1045,154 @@ def test_post_dataset_date_metadata(_logged_in_client): assert response_data[0]["type"] == "Type" assert response_data[0]["information"] == "Info" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date", + json=[{"date": 20210102, "type": "Type", "information": "Info"}], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + 
pytest.global_dataset_date_id_admin = admin_response_data[0]["id"] -def test_delete_dataset_date_metadata(_logged_in_client): + assert admin_response_data[0]["date"] == 20210102 + assert admin_response_data[0]["type"] == "Type" + assert admin_response_data[0]["information"] == "Info" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date", + json=[{"date": 20210103, "type": "Type", "information": "Info"}], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_date_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["date"] == 20210103 + assert editor_response_data[0]["type"] == "Type" + assert editor_response_data[0]["information"] == "Info" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date", + json=[{"date": 20210101, "type": "Type", "information": "Info"}], + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_date_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset date metadata content + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset date metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - date_id = pytest.global_dataset_date_id - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( + admin_response = _admin_client.get( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/date" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date" + ) + viewer_response = _viewer_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/date" ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 0 + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + assert len(response_data) == 3 + assert len(admin_response_data) == 3 + assert len(editor_response_data) == 3 + assert len(viewer_response_data) == 3 -# ------------------- DE-IDENTIFICATION LEVEL METADATA ------------------- # -def test_get_dataset_deidentification_metadata(_logged_in_client): + assert response_data[0]["date"] == 20210101 + assert response_data[0]["type"] == "Type" + assert response_data[0]["information"] == "Info" + assert response_data[1]["date"] == 20210102 + assert response_data[1]["type"] == "Type" + assert response_data[1]["information"] == "Info" + assert response_data[2]["date"] == 20210103 + assert response_data[2]["type"] == "Type" + + assert admin_response_data[0]["date"] == 20210101 + assert admin_response_data[0]["type"] == "Type" + assert admin_response_data[0]["information"] == "Info" + assert admin_response_data[1]["date"] == 20210102 + assert admin_response_data[1]["type"] == "Type" + assert admin_response_data[1]["information"] == "Info" + assert admin_response_data[2]["date"] == 20210103 + assert admin_response_data[2]["type"] == "Type" + + assert editor_response_data[0]["date"] == 20210101 + assert editor_response_data[0]["type"] == "Type" + assert editor_response_data[0]["information"] == "Info" + 
assert editor_response_data[1]["date"] == 20210102 + assert editor_response_data[1]["type"] == "Type" + assert editor_response_data[1]["information"] == "Info" + assert editor_response_data[2]["date"] == 20210103 + assert editor_response_data[2]["type"] == "Type" + + assert viewer_response_data[0]["date"] == 20210101 + assert viewer_response_data[0]["type"] == "Type" + assert viewer_response_data[0]["information"] == "Info" + assert viewer_response_data[1]["date"] == 20210102 + assert viewer_response_data[1]["type"] == "Type" + assert viewer_response_data[1]["information"] == "Info" + assert viewer_response_data[2]["date"] == 20210103 + assert viewer_response_data[2]["type"] == "Type" + + +def test_delete_dataset_date_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - de-identification metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset date metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + date_id = pytest.global_dataset_date_id + admin_date_id = pytest.global_dataset_date_id_admin + editor_date_id = pytest.global_dataset_date_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + ) + admin_response = _admin_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{admin_date_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{editor_date_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_put_dataset_deidentification_metadata(_logged_in_client): +# ------------------- DE-IDENTIFICATION LEVEL METADATA ------------------- # +def test_put_dataset_deidentification_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' @@ -444,6 +1200,7 @@ def test_put_dataset_deidentification_metadata(_logged_in_client): Then check that the response is valid and updates the dataset de-identification metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -471,97 +1228,447 @@ def test_put_dataset_deidentification_metadata(_logged_in_client): assert response_data["k_anon"] is True assert response_data["details"] == "Details" + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", + json={ + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) -# ------------------- DESCRIPTION METADATA ------------------- # -def test_get_dataset_descriptions_metadata(_logged_in_client): + assert admin_response_data["type"] == "Level" + assert admin_response_data["direct"] is True + assert admin_response_data["hipaa"] is True + assert admin_response_data["dates"] is True + assert 
admin_response_data["nonarr"] is True + assert admin_response_data["k_anon"] is True + assert admin_response_data["details"] == "Details" + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", + json={ + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["type"] == "Level" + assert editor_response_data["direct"] is True + assert editor_response_data["hipaa"] is True + assert editor_response_data["dates"] is True + assert editor_response_data["nonarr"] is True + assert editor_response_data["k_anon"] is True + assert editor_response_data["details"] == "Details" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", + json={ + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_deidentification_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' + When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - description metadata content + de-identification metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + ) + admin_response = 
_admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -def test_post_dataset_description_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' + assert response_data["type"] == "Level" + assert response_data["direct"] is True + assert response_data["hipaa"] is True + assert response_data["dates"] is True + assert response_data["nonarr"] is True + assert response_data["k_anon"] is True + assert response_data["details"] == "Details" + + assert admin_response_data["type"] == "Level" + assert admin_response_data["direct"] is True + assert admin_response_data["hipaa"] is True + assert admin_response_data["dates"] is True + assert admin_response_data["nonarr"] is True + assert admin_response_data["k_anon"] is True + assert admin_response_data["details"] == "Details" + + assert editor_response_data["type"] == "Level" + assert editor_response_data["direct"] is True + assert editor_response_data["hipaa"] is True + assert editor_response_data["dates"] is True + assert editor_response_data["nonarr"] is True + assert editor_response_data["k_anon"] is True + assert editor_response_data["details"] == "Details" + + assert viewer_response_data["type"] == "Level" + assert 
viewer_response_data["direct"] is True + assert viewer_response_data["hipaa"] is True + assert viewer_response_data["dates"] is True + assert viewer_response_data["nonarr"] is True + assert viewer_response_data["k_anon"] is True + assert viewer_response_data["details"] == "Details" + + +# ------------------- DESCRIPTION METADATA ------------------- # +def test_post_dataset_descriptions_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' endpoint is requested (POST) Then check that the response is valid and creates the dataset description metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Description", "type": "Methods"}], + json=[{"description": "Owner Description", "type": "Methods"}], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_description_id = response_data[0]["id"] - assert response_data[0]["description"] == "Description" + assert response_data[0]["description"] == "Owner Description" assert response_data[0]["type"] == "Methods" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description", + json=[{"description": "Admin Description", "type": "Methods"}], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) -def test_delete_dataset_description_metadata(_logged_in_client): + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_description_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["description"] 
== "Admin Description" + assert admin_response_data[0]["type"] == "Methods" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description", + json=[{"description": "Editor Description", "type": "Methods"}], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_description_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["description"] == "Editor Description" + assert editor_response_data[0]["type"] == "Methods" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description", + json=[{"description": "Viewer Description", "type": "Methods"}], + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_descriptions_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset description metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - description_id = pytest.global_dataset_description_id - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( + admin_response = _admin_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/description" ) - assert ( - len(json.loads(response_get.data)) == 1 - and json.loads(response_get.data)[0]["type"] == "Abstract" + editor_response = 
_editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description" ) + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 -# ------------------- FUNDER METADATA ------------------- # -def test_get_dataset_funder_metadata(_logged_in_client): + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + # Dataset description is included in the responses + assert len(response_data) == 4 + assert len(admin_response_data) == 4 + assert len(editor_response_data) == 4 + assert len(viewer_response_data) == 4 + + # seacrch for type abstract index + main_descrip = next( + (index for (index, d) in enumerate(response_data) if d["type"] == "Abstract"), + None, + ) + a_main_descrip = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["type"] == "Abstract" + ), + None, + ) + e_main_descrip = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["type"] == "Abstract" + ), + None, + ) + v_main_descrip = next( + ( + index + for (index, d) in enumerate(viewer_response_data) + if d["type"] == "Abstract" + ), + None, + ) + + # search for owner description + # pylint: disable=line-too-long + own_descrip = next( + ( + index + for (index, d) in enumerate(response_data) + if d["description"] == "Owner Description" + ), + None, + ) + a_own_descrip = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["description"] == "Owner Description" + ), + None, + ) + e_own_descrip = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["description"] == "Owner Description" + ), + None, + ) + v_own_descrip = next( + 
( + index + for (index, d) in enumerate(viewer_response_data) + if d["description"] == "Owner Description" + ), + None, + ) + + # search for admin description + admin_descrip = next( + ( + index + for (index, d) in enumerate(response_data) + if d["description"] == "Admin Description" + ), + None, + ) + a_admin_descrip = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["description"] == "Admin Description" + ), + None, + ) + e_admin_descrip = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["description"] == "Admin Description" + ), + None, + ) + v_admin_descrip = next( + ( + index + for (index, d) in enumerate(viewer_response_data) + if d["description"] == "Admin Description" + ), + None, + ) + + # search for editor description + edit_descrip = next( + ( + index + for (index, d) in enumerate(response_data) + if d["description"] == "Editor Description" + ), + None, + ) + a_edit_descrip = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["description"] == "Editor Description" + ), + None, + ) + e_edit_descrip = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["description"] == "Editor Description" + ), + None, + ) + v_edit_descrip = next( + ( + index + for (index, d) in enumerate(viewer_response_data) + if d["description"] == "Editor Description" + ), + None, + ) + + assert response_data[main_descrip]["description"] == "Dataset Description" + assert response_data[main_descrip]["type"] == "Abstract" + assert response_data[own_descrip]["description"] == "Owner Description" + assert response_data[own_descrip]["type"] == "Methods" + assert response_data[admin_descrip]["description"] == "Admin Description" + assert response_data[admin_descrip]["type"] == "Methods" + assert response_data[edit_descrip]["description"] == "Editor Description" + assert response_data[edit_descrip]["type"] == "Methods" + + assert admin_response_data[a_main_descrip]["description"] == 
"Dataset Description" + assert admin_response_data[a_main_descrip]["type"] == "Abstract" + assert admin_response_data[a_own_descrip]["description"] == "Owner Description" + assert admin_response_data[a_own_descrip]["type"] == "Methods" + assert admin_response_data[a_admin_descrip]["description"] == "Admin Description" + assert admin_response_data[a_admin_descrip]["type"] == "Methods" + assert admin_response_data[a_edit_descrip]["description"] == "Editor Description" + assert admin_response_data[a_edit_descrip]["type"] == "Methods" + + assert editor_response_data[e_main_descrip]["description"] == "Dataset Description" + assert editor_response_data[e_main_descrip]["type"] == "Abstract" + assert editor_response_data[e_own_descrip]["description"] == "Owner Description" + assert editor_response_data[e_own_descrip]["type"] == "Methods" + assert editor_response_data[e_admin_descrip]["description"] == "Admin Description" + assert editor_response_data[e_admin_descrip]["type"] == "Methods" + assert editor_response_data[e_edit_descrip]["description"] == "Editor Description" + assert editor_response_data[e_edit_descrip]["type"] == "Methods" + + assert viewer_response_data[v_main_descrip]["description"] == "Dataset Description" + assert viewer_response_data[v_main_descrip]["type"] == "Abstract" + assert viewer_response_data[v_own_descrip]["description"] == "Owner Description" + assert viewer_response_data[v_own_descrip]["type"] == "Methods" + assert viewer_response_data[v_admin_descrip]["description"] == "Admin Description" + assert viewer_response_data[v_admin_descrip]["type"] == "Methods" + assert viewer_response_data[v_edit_descrip]["description"] == "Editor Description" + assert viewer_response_data[v_edit_descrip]["type"] == "Methods" + + +def test_delete_dataset_description_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' - endpoint is requested (GET) - 
Then check that the response is valid and retrieves the dataset - funder metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + description metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + description_id = pytest.global_dataset_description_id + admin_description_id = pytest.global_dataset_description_id_admin + editor_description_id = pytest.global_dataset_description_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{description_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{admin_description_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/description/{editor_description_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_post_dataset_funder_metadata(_logged_in_client): +# ------------------- FUNDER METADATA ------------------- # +def test_post_dataset_funder_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' @@ -569,6 +1676,7 @@ def test_post_dataset_funder_metadata(_logged_in_client): Then check that the response is valid and creates the dataset funder metadata content """ 
+ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -586,6 +1694,8 @@ def test_post_dataset_funder_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -599,52 +1709,247 @@ def test_post_dataset_funder_metadata(_logged_in_client): assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" assert response_data[0]["identifier_type"] == "Identifier Type" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + json=[ + { + "name": "Admin Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_funder_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["name"] == "Admin Name" + assert admin_response_data[0]["award_number"] == "award number" + assert admin_response_data[0]["award_title"] == "Award Title" + assert admin_response_data[0]["award_uri"] == "Award URI" + assert admin_response_data[0]["identifier"] == "Identifier" + assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_response_data[0]["identifier_type"] == "Identifier Type" -def test_delete_dataset_funder_metadata(_logged_in_client): + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + json=[ + { + "name": "Editor Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + 
"identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_funder_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["name"] == "Editor Name" + assert editor_response_data[0]["award_number"] == "award number" + assert editor_response_data[0]["award_title"] == "Award Title" + assert editor_response_data[0]["award_uri"] == "Award URI" + assert editor_response_data[0]["identifier"] == "Identifier" + assert ( + editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + ) # pylint: disable=line-too-long + assert editor_response_data[0]["identifier_type"] == "Identifier Type" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + json=[ + { + "name": "Viewer Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_funder_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset funder metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - funder_id = pytest.global_dataset_funder_id - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + 
response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + ) + viewer_response = _viewer_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 0 + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 -# ------------------- OTHER METADATA ------------------- # -def test_get_other_dataset_metadata(_logged_in_client): + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert len(response_data) == 3 + assert len(admin_response_data) == 3 + assert len(editor_response_data) == 3 + assert len(viewer_response_data) == 3 + + assert response_data[0]["name"] == "Name" + assert response_data[0]["award_number"] == "award number" + assert response_data[0]["award_title"] == "Award Title" + assert response_data[0]["award_uri"] == "Award URI" + assert response_data[0]["identifier"] == "Identifier" + assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert response_data[0]["identifier_type"] == "Identifier Type" + assert response_data[1]["name"] == "Admin Name" + assert response_data[1]["award_number"] == "award number" + assert response_data[1]["award_title"] == "Award Title" + assert response_data[1]["award_uri"] == "Award URI" + assert response_data[1]["identifier"] == "Identifier" + assert response_data[1]["identifier_scheme_uri"] == "Identifier Scheme 
URI" + assert response_data[1]["identifier_type"] == "Identifier Type" + assert response_data[2]["name"] == "Editor Name" + assert response_data[2]["award_number"] == "award number" + assert response_data[2]["award_title"] == "Award Title" + assert response_data[2]["award_uri"] == "Award URI" + assert response_data[2]["identifier"] == "Identifier" + assert response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert response_data[2]["identifier_type"] == "Identifier Type" + + assert admin_response_data[0]["name"] == "Name" + assert admin_response_data[0]["award_number"] == "award number" + assert admin_response_data[0]["award_title"] == "Award Title" + assert admin_response_data[0]["award_uri"] == "Award URI" + assert admin_response_data[0]["identifier"] == "Identifier" + assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_response_data[0]["identifier_type"] == "Identifier Type" + assert admin_response_data[1]["name"] == "Admin Name" + assert admin_response_data[1]["award_number"] == "award number" + assert admin_response_data[1]["award_title"] == "Award Title" + assert admin_response_data[1]["award_uri"] == "Award URI" + assert admin_response_data[1]["identifier"] == "Identifier" + assert admin_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_response_data[1]["identifier_type"] == "Identifier Type" + assert admin_response_data[2]["name"] == "Editor Name" + assert admin_response_data[2]["award_number"] == "award number" + assert admin_response_data[2]["award_title"] == "Award Title" + assert admin_response_data[2]["award_uri"] == "Award URI" + assert admin_response_data[2]["identifier"] == "Identifier" + assert admin_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_response_data[2]["identifier_type"] == "Identifier Type" + + assert editor_response_data[0]["name"] == "Name" + assert editor_response_data[0]["award_number"] == "award 
number" + assert editor_response_data[0]["award_title"] == "Award Title" + assert editor_response_data[0]["award_uri"] == "Award URI" + assert editor_response_data[0]["identifier"] == "Identifier" + assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_response_data[0]["identifier_type"] == "Identifier Type" + assert editor_response_data[1]["name"] == "Admin Name" + assert editor_response_data[1]["award_number"] == "award number" + assert editor_response_data[1]["award_title"] == "Award Title" + assert editor_response_data[1]["award_uri"] == "Award URI" + assert editor_response_data[1]["identifier"] == "Identifier" + assert editor_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_response_data[1]["identifier_type"] == "Identifier Type" + assert editor_response_data[2]["name"] == "Editor Name" + assert editor_response_data[2]["award_number"] == "award number" + assert editor_response_data[2]["award_title"] == "Award Title" + assert editor_response_data[2]["award_uri"] == "Award URI" + assert editor_response_data[2]["identifier"] == "Identifier" + assert editor_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_response_data[2]["identifier_type"] == "Identifier Type" + + assert viewer_response_data[0]["name"] == "Name" + assert viewer_response_data[0]["award_number"] == "award number" + assert viewer_response_data[0]["award_title"] == "Award Title" + assert viewer_response_data[0]["award_uri"] == "Award URI" + assert viewer_response_data[0]["identifier"] == "Identifier" + assert viewer_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_response_data[0]["identifier_type"] == "Identifier Type" + assert viewer_response_data[1]["name"] == "Admin Name" + assert viewer_response_data[1]["award_number"] == "award number" + assert viewer_response_data[1]["award_title"] == "Award Title" + assert viewer_response_data[1]["award_uri"] 
== "Award URI" + assert viewer_response_data[1]["identifier"] == "Identifier" + assert viewer_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_response_data[1]["identifier_type"] == "Identifier Type" + assert viewer_response_data[2]["name"] == "Editor Name" + assert viewer_response_data[2]["award_number"] == "award number" + assert viewer_response_data[2]["award_title"] == "Award Title" + assert viewer_response_data[2]["award_uri"] == "Award URI" + assert viewer_response_data[2]["identifier"] == "Identifier" + assert viewer_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_response_data[2]["identifier_type"] == "Identifier Type" + + +def test_delete_dataset_funder_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - other metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + funder metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + funder_id = pytest.global_dataset_funder_id + a_funder_id = pytest.global_dataset_funder_id_admin + e_funder_id = pytest.global_dataset_funder_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{a_funder_id}" + ) + 
editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{e_funder_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_put_other_dataset_metadata(_logged_in_client): +# ------------------- OTHER METADATA ------------------- # +def test_put_other_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' @@ -652,6 +1957,7 @@ def test_put_other_dataset_metadata(_logged_in_client): Then check that the response is valid and updates the dataset other metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -671,32 +1977,130 @@ def test_put_other_dataset_metadata(_logged_in_client): assert response_data["acknowledgement"] == "Yes" assert response_data["language"] == "English" - # assert response_data["resource_type"] == "Resource Type" # CURRENTLY NOT BEING RETURNED + # assert ( + # response_data["resource_type"] == "Resource Type" + # ) # CURRENTLY NOT BEING RETURNED assert response_data["size"] == ["Size"] assert response_data["standards_followed"] == "Standards Followed" - # ABOVE STATEMENT CURRENTLY NOT BEING UPDATED + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Admin Resource Type", + "size": ["Size"], + "standards_followed": "Standards Followed", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) -# ------------------- PUBLICATION METADATA ------------------- # -def test_get_dataset_publisher_metadata(_logged_in_client): + assert 
admin_response_data["acknowledgement"] == "Yes" + assert admin_response_data["language"] == "English" + # assert admin_response_data["resource_type"] == "Admin Resource Type" + assert admin_response_data["size"] == ["Size"] + assert admin_response_data["standards_followed"] == "Standards Followed" + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Editor Resource Type", + "size": ["Size"], + "standards_followed": "Standards Followed", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["acknowledgement"] == "Yes" + assert editor_response_data["language"] == "English" + # assert editor_response_data["resource_type"] == "Editor Resource Type" + assert editor_response_data["size"] == ["Size"] + assert editor_response_data["standards_followed"] == "Standards Followed" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Viewer Resource Type", + "size": ["Size"], + "standards_followed": "Standards Followed", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_other_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - publisher metadata content + other metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + 
admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -def test_put_dataset_publisher_metadata(_logged_in_client): + # Editor was the last to update the metadata successfully so + # the response should reflect that + assert response_data["acknowledgement"] == "Yes" + assert response_data["language"] == "English" + # assert response_data["resource_type"] == "Editor Resource Type" + assert response_data["size"] == ["Size"] + assert response_data["standards_followed"] == "Standards Followed" + + assert admin_response_data["acknowledgement"] == "Yes" + assert admin_response_data["language"] == "English" + # assert admin_response_data["resource_type"] == "Editor Resource Type" + assert admin_response_data["size"] == ["Size"] + assert admin_response_data["standards_followed"] == "Standards Followed" + + assert editor_response_data["acknowledgement"] == "Yes" + assert editor_response_data["language"] == "English" + # assert editor_response_data["resource_type"] == "Editor Resource Type" + assert editor_response_data["size"] == ["Size"] + assert editor_response_data["standards_followed"] == "Standards Followed" + + assert viewer_response_data["acknowledgement"] == "Yes" + assert viewer_response_data["language"] == "English" + # assert viewer_response_data["resource_type"] == "Editor Resource Type" + assert viewer_response_data["size"] == ["Size"] + 
assert viewer_response_data["standards_followed"] == "Standards Followed" + + +# ------------------- PUBLICATION METADATA ------------------- # +def test_put_dataset_publisher_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' @@ -704,6 +2108,7 @@ def test_put_dataset_publisher_metadata(_logged_in_client): Then check that the response is valid and updates the dataset publisher metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -725,27 +2130,141 @@ def test_put_dataset_publisher_metadata(_logged_in_client): response_data["managing_organization_ror_id"] == "Managing Organization ROR ID" ) + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + json={ + "publisher": "Publisher", + "managing_organization_name": "Managing Admin Organization Name", + "managing_organization_ror_id": "Managing Organization ROR ID", + }, + ) -# ------------------- RECORD KEYS METADATA ------------------- # -def test_get_dataset_record_keys_metadata(_logged_in_client): + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data["publisher"] == "Publisher" + assert ( + admin_response_data["managing_organization_name"] + == "Managing Admin Organization Name" + ) + assert ( + admin_response_data["managing_organization_ror_id"] + == "Managing Organization ROR ID" + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + json={ + "publisher": "Publisher", + "managing_organization_name": "Managing Editor Organization Name", + "managing_organization_ror_id": "Managing Organization ROR ID", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = 
json.loads(editor_response.data) + + assert editor_response_data["publisher"] == "Publisher" + assert ( + editor_response_data["managing_organization_name"] + == "Managing Editor Organization Name" + ) + assert ( + editor_response_data["managing_organization_ror_id"] + == "Managing Organization ROR ID" + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + json={ + "publisher": "Publisher", + "managing_organization_name": "Managing Viewer Organization Name", + "managing_organization_ror_id": "Managing Organization ROR ID", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_publisher_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - record keys metadata content + publisher metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = 
json.loads(viewer_response.data) -def test_put_dataset_record_keys_metadata(_logged_in_client): + # Editor was the last to update the metadata successfully so + # the response should reflect that + assert response_data["publisher"] == "Publisher" + assert ( + response_data["managing_organization_name"] + == "Managing Editor Organization Name" + ) + assert ( + response_data["managing_organization_ror_id"] == "Managing Organization ROR ID" + ) + + assert admin_response_data["publisher"] == "Publisher" + assert ( + admin_response_data["managing_organization_name"] + == "Managing Editor Organization Name" + ) + assert ( + admin_response_data["managing_organization_ror_id"] + == "Managing Organization ROR ID" + ) + + assert editor_response_data["publisher"] == "Publisher" + assert ( + editor_response_data["managing_organization_name"] + == "Managing Editor Organization Name" + ) + assert ( + editor_response_data["managing_organization_ror_id"] + == "Managing Organization ROR ID" + ) + + assert viewer_response_data["publisher"] == "Publisher" + assert ( + viewer_response_data["managing_organization_name"] + == "Managing Editor Organization Name" + ) + assert ( + viewer_response_data["managing_organization_ror_id"] + == "Managing Organization ROR ID" + ) + + +# ------------------- RECORD KEYS METADATA ------------------- # +def test_put_dataset_record_keys_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' @@ -753,6 +2272,7 @@ def test_put_dataset_record_keys_metadata(_logged_in_client): Then check that the response is valid and updates the dataset record keys metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -766,27 +2286,88 @@ def test_put_dataset_record_keys_metadata(_logged_in_client): assert response_data["type"] == "Record 
Type" assert response_data["details"] == "Details for Record Keys" + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", + json={"type": "Record Type", "details": "Admin Details for Record Keys"}, + ) -# ------------------- RELATED ITEM METADATA ------------------- # -def test_get_dataset_related_item_metadata(_logged_in_client): + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data["type"] == "Record Type" + assert admin_response_data["details"] == "Admin Details for Record Keys" + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", + json={"type": "Record Type", "details": "Editor Details for Record Keys"}, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["type"] == "Record Type" + assert editor_response_data["details"] == "Editor Details for Record Keys" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", + json={"type": "Record Type", "details": "Viewer Details for Record Keys"}, + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_record_keys_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - related item metadata content + record keys metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" + ) + admin_response = _admin_client.get( 
+ f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + # Editor was the last to update the metadata successfully so + # the response should reflect that + assert response_data["type"] == "Record Type" + assert response_data["details"] == "Editor Details for Record Keys" + assert admin_response_data["type"] == "Record Type" + assert admin_response_data["details"] == "Editor Details for Record Keys" -def test_post_dataset_related_item_metadata(_logged_in_client): + assert editor_response_data["type"] == "Record Type" + assert editor_response_data["details"] == "Editor Details for Record Keys" + + assert viewer_response_data["type"] == "Record Type" + assert viewer_response_data["details"] == "Editor Details for Record Keys" + + +# ------------------- RELATED ITEM METADATA ------------------- # +def test_post_dataset_related_item_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-item' @@ -794,6 +2375,7 @@ def test_post_dataset_related_item_metadata(_logged_in_client): Then check that the response is valid and creates the dataset related item metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -836,9 
+2418,28 @@ def test_post_dataset_related_item_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) + # seach for main title and subtitle index in response_data[n]["titles"] + main_title_0 = next( + ( + index + for (index, d) in enumerate(response_data[0]["titles"]) + if d["type"] == "MainTitle" + ), + None, + ) + sub_title_0 = next( + ( + index + for (index, d) in enumerate(response_data[0]["titles"]) + if d["type"] == "Subtitle" + ), + None, + ) pytest.global_dataset_related_item_id = response_data[0]["id"] pytest.global_dataset_related_item_contributor_id = response_data[0][ "contributors" @@ -849,7 +2450,357 @@ def test_post_dataset_related_item_metadata(_logged_in_client): pytest.global_dataset_related_item_identifier_id = response_data[0]["identifiers"][ 0 ]["id"] - pytest.global_dataset_related_item_title_id = response_data[0]["titles"][0]["id"] + # pylint: disable=line-too-long + pytest.global_dataset_related_item_main_title_id = response_data[0]["titles"][ + main_title_0 + ]["id"] + pytest.global_dataset_related_item_sub_title_id = response_data[0]["titles"][ + sub_title_0 + ]["id"] + + assert response_data[0]["contributors"][0]["name"] == "Ndafsdame" + assert response_data[0]["contributors"][0]["contributor_type"] == "Con Type" + assert response_data[0]["contributors"][0]["name_type"] == "Personal" + assert response_data[0]["creators"][0]["name"] == "Name" + assert response_data[0]["creators"][0]["name_type"] == "Personal" + assert response_data[0]["edition"] == "Edition" + assert response_data[0]["first_page"] == "First Page" + assert response_data[0]["identifiers"][0]["identifier"] == "Identifier" + assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" + assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" + assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme 
URI" + assert response_data[0]["identifiers"][0]["type"] == "ARK" + assert response_data[0]["issue"] == "Issue" + assert response_data[0]["last_page"] == "Last Page" + assert response_data[0]["number_type"] == "Number Type" + assert response_data[0]["number_value"] == "Number Value" + assert response_data[0]["publication_year"] == 2013 + assert response_data[0]["publisher"] == "Publisher" + assert response_data[0]["relation_type"] == "Relation Type" + assert response_data[0]["titles"][main_title_0]["title"] == "Title" + assert response_data[0]["titles"][main_title_0]["type"] == "MainTitle" + assert response_data[0]["titles"][sub_title_0]["title"] == "Title" + assert response_data[0]["titles"][sub_title_0]["type"] == "Subtitle" + assert response_data[0]["type"] == "Type" + assert response_data[0]["volume"] == "Volume" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + json=[ + { + "contributors": [ + { + "name": "Admin Ndafsdame", + "contributor_type": "Admin Con Type", + "name_type": "Personal", + } + ], + "creators": [{"name": "Admin Name", "name_type": "Personal"}], + "edition": "Admin Edition", + "first_page": "Admin First Page", + "identifiers": [ + { + "identifier": "Admin Identifier", + "metadata_scheme": "Admin Metadata Scheme", + "scheme_type": "Admin Scheme Type", + "scheme_uri": "Admin Scheme URI", + "type": "ARK", + } + ], + "issue": "Admin Issue", + "last_page": "Admin Last Page", + "number_type": "Admin Number Type", + "number_value": "Admin Number Value", + "publication_year": 2013, + "publisher": "Admin Publisher", + "relation_type": "Admin Relation Type", + "titles": [{"title": "Admin Title", "type": "AlternativeTitle"}], + "type": "Admin Type", + "volume": "Admin Volume", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + 
pytest.global_dataset_related_item_id_admin = admin_response_data[1]["id"] + pytest.global_dataset_related_item_contributor_id_admin = admin_response_data[1][ + "contributors" + ][0]["id"] + pytest.global_dataset_related_item_creator_id_admin = admin_response_data[1][ + "creators" + ][0]["id"] + pytest.global_dataset_related_item_identifier_id_admin = admin_response_data[1][ + "identifiers" + ][0]["id"] + pytest.global_dataset_related_item_title_id_admin = admin_response_data[1][ + "titles" + ][0]["id"] + + assert admin_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" + assert ( + admin_response_data[1]["contributors"][0]["contributor_type"] + == "Admin Con Type" + ) + assert admin_response_data[1]["contributors"][0]["name_type"] == "Personal" + assert admin_response_data[1]["creators"][0]["name"] == "Admin Name" + assert admin_response_data[1]["creators"][0]["name_type"] == "Personal" + assert admin_response_data[1]["edition"] == "Admin Edition" + assert admin_response_data[1]["first_page"] == "Admin First Page" + assert admin_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" + assert ( + admin_response_data[1]["identifiers"][0]["metadata_scheme"] + == "Admin Metadata Scheme" + ) + assert ( + admin_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" + ) + assert admin_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" + assert admin_response_data[1]["identifiers"][0]["type"] == "ARK" + assert admin_response_data[1]["issue"] == "Admin Issue" + assert admin_response_data[1]["last_page"] == "Admin Last Page" + assert admin_response_data[1]["number_type"] == "Admin Number Type" + assert admin_response_data[1]["number_value"] == "Admin Number Value" + assert admin_response_data[1]["publication_year"] == 2013 + assert admin_response_data[1]["publisher"] == "Admin Publisher" + assert admin_response_data[1]["relation_type"] == "Admin Relation Type" + assert 
admin_response_data[1]["titles"][0]["title"] == "Admin Title" + assert admin_response_data[1]["titles"][0]["type"] == "AlternativeTitle" + assert admin_response_data[1]["type"] == "Admin Type" + assert admin_response_data[1]["volume"] == "Admin Volume" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + json=[ + { + "contributors": [ + { + "name": "Editor Ndafsdame", + "contributor_type": "Editor Con Type", + "name_type": "Personal", + } + ], + "creators": [{"name": "Editor Name", "name_type": "Personal"}], + "edition": "Editor Edition", + "first_page": "Editor First Page", + "identifiers": [ + { + "identifier": "Editor Identifier", + "metadata_scheme": "Editor Metadata Scheme", + "scheme_type": "Editor Scheme Type", + "scheme_uri": "Editor Scheme URI", + "type": "ARK", + } + ], + "issue": "Editor Issue", + "last_page": "Editor Last Page", + "number_type": "Editor Number Type", + "number_value": "Editor Number Value", + "publication_year": 2013, + "publisher": "Editor Publisher", + "relation_type": "Editor Relation Type", + "titles": [{"title": "Editor Title", "type": "Subtitle"}], + "type": "Editor Type", + "volume": "Editor Volume", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_related_item_id_editor = editor_response_data[2]["id"] + pytest.global_dataset_related_item_contributor_id_editor = editor_response_data[2][ + "contributors" + ][0]["id"] + pytest.global_dataset_related_item_creator_id_editor = editor_response_data[2][ + "creators" + ][0]["id"] + pytest.global_dataset_related_item_identifier_id_editor = editor_response_data[2][ + "identifiers" + ][0]["id"] + pytest.global_dataset_related_item_title_id_editor = editor_response_data[2][ + "titles" + ][0]["id"] + + assert editor_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" + assert ( + 
editor_response_data[2]["contributors"][0]["contributor_type"] + == "Editor Con Type" + ) + assert editor_response_data[2]["contributors"][0]["name_type"] == "Personal" + assert editor_response_data[2]["creators"][0]["name"] == "Editor Name" + assert editor_response_data[2]["creators"][0]["name_type"] == "Personal" + assert editor_response_data[2]["edition"] == "Editor Edition" + assert editor_response_data[2]["first_page"] == "Editor First Page" + assert ( + editor_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" + ) + assert ( + editor_response_data[2]["identifiers"][0]["metadata_scheme"] + == "Editor Metadata Scheme" + ) + assert ( + editor_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" + ) + assert ( + editor_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" + ) + assert editor_response_data[2]["identifiers"][0]["type"] == "ARK" + assert editor_response_data[2]["issue"] == "Editor Issue" + assert editor_response_data[2]["last_page"] == "Editor Last Page" + assert editor_response_data[2]["number_type"] == "Editor Number Type" + assert editor_response_data[2]["number_value"] == "Editor Number Value" + assert editor_response_data[2]["publication_year"] == 2013 + assert editor_response_data[2]["publisher"] == "Editor Publisher" + assert editor_response_data[2]["relation_type"] == "Editor Relation Type" + assert editor_response_data[2]["titles"][0]["title"] == "Editor Title" + assert editor_response_data[2]["titles"][0]["type"] == "Subtitle" + assert editor_response_data[2]["type"] == "Editor Type" + assert editor_response_data[2]["volume"] == "Editor Volume" + + viewer_client = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + json=[ + { + "contributors": [ + { + "name": "Viewer Ndafsdame", + "contributor_type": "Viewer Con Type", + "name_type": "Personal", + } + ], + "creators": [{"name": "Viewer Name", "name_type": "Personal"}], + "edition": "Viewer 
Edition", + "first_page": "Viewer First Page", + "identifiers": [ + { + "identifier": "Viewer Identifier", + "metadata_scheme": "Viewer Metadata Scheme", + "scheme_type": "Viewer Scheme Type", + "scheme_uri": "Viewer Scheme URI", + "type": "ARK", + } + ], + "issue": "Viewer Issue", + "last_page": "Viewer Last Page", + "number_type": "Viewer Number Type", + "number_value": "Viewer Number Value", + "publication_year": 2013, + "publisher": "Viewer Publisher", + "relation_type": "Viewer Relation Type", + "titles": [{"title": "Viewer Title", "type": "Subtitle"}], + "type": "Viewer Type", + "volume": "Viewer Volume", + } + ], + ) + + assert viewer_client.status_code == 403 + + +def test_get_dataset_related_item_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + related item metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = 
json.loads(viewer_response.data) + + # seach for main title and subtitle index in response_data[n]["titles"] + # pylint: disable=line-too-long + main_title_0 = next( + ( + index + for (index, d) in enumerate(response_data[0]["titles"]) + if d["type"] == "MainTitle" + ), + None, + ) + sub_title_0 = next( + ( + index + for (index, d) in enumerate(response_data[0]["titles"]) + if d["type"] == "Subtitle" + ), + None, + ) + a_main_title_0 = next( + ( + index + for (index, d) in enumerate(admin_response_data[0]["titles"]) + if d["type"] == "MainTitle" + ), + None, + ) + a_sub_title_0 = next( + ( + index + for (index, d) in enumerate(admin_response_data[0]["titles"]) + if d["type"] == "Subtitle" + ), + None, + ) + e_main_title_0 = next( + ( + index + for (index, d) in enumerate(editor_response_data[0]["titles"]) + if d["type"] == "MainTitle" + ), + None, + ) + e_sub_title_0 = next( + ( + index + for (index, d) in enumerate(editor_response_data[0]["titles"]) + if d["type"] == "Subtitle" + ), + None, + ) + v_main_title_0 = next( + ( + index + for (index, d) in enumerate(viewer_response_data[0]["titles"]) + if d["type"] == "MainTitle" + ), + None, + ) + v_sub_title_0 = next( + ( + index + for (index, d) in enumerate(viewer_response_data[0]["titles"]) + if d["type"] == "Subtitle" + ), + None, + ) assert response_data[0]["contributors"][0]["name"] == "Ndafsdame" assert response_data[0]["contributors"][0]["contributor_type"] == "Con Type" @@ -870,15 +2821,346 @@ def test_post_dataset_related_item_metadata(_logged_in_client): assert response_data[0]["publication_year"] == 2013 assert response_data[0]["publisher"] == "Publisher" assert response_data[0]["relation_type"] == "Relation Type" - assert response_data[0]["titles"][0]["title"] == "Title" - assert response_data[0]["titles"][0]["type"] == "MainTitle" - assert response_data[0]["titles"][1]["title"] == "Title" - assert response_data[0]["titles"][1]["type"] == "Subtitle" + assert 
response_data[0]["titles"][main_title_0]["title"] == "Title" + assert response_data[0]["titles"][main_title_0]["type"] == "MainTitle" + assert response_data[0]["titles"][sub_title_0]["title"] == "Title" + assert response_data[0]["titles"][sub_title_0]["type"] == "Subtitle" assert response_data[0]["type"] == "Type" assert response_data[0]["volume"] == "Volume" + assert response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" + assert response_data[1]["contributors"][0]["contributor_type"] == "Admin Con Type" + assert response_data[1]["contributors"][0]["name_type"] == "Personal" + assert response_data[1]["creators"][0]["name"] == "Admin Name" + assert response_data[1]["creators"][0]["name_type"] == "Personal" + assert response_data[1]["edition"] == "Admin Edition" + assert response_data[1]["first_page"] == "Admin First Page" + assert response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" + assert ( + response_data[1]["identifiers"][0]["metadata_scheme"] == "Admin Metadata Scheme" + ) + assert response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" + assert response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" + assert response_data[1]["identifiers"][0]["type"] == "ARK" + assert response_data[1]["issue"] == "Admin Issue" + assert response_data[1]["last_page"] == "Admin Last Page" + assert response_data[1]["number_type"] == "Admin Number Type" + assert response_data[1]["number_value"] == "Admin Number Value" + assert response_data[1]["publication_year"] == 2013 + assert response_data[1]["publisher"] == "Admin Publisher" + assert response_data[1]["relation_type"] == "Admin Relation Type" + assert response_data[1]["titles"][0]["title"] == "Admin Title" + assert response_data[1]["titles"][0]["type"] == "AlternativeTitle" + assert response_data[1]["type"] == "Admin Type" + assert response_data[1]["volume"] == "Admin Volume" + assert response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" + assert 
response_data[2]["contributors"][0]["contributor_type"] == "Editor Con Type" + assert response_data[2]["contributors"][0]["name_type"] == "Personal" + assert response_data[2]["creators"][0]["name"] == "Editor Name" + assert response_data[2]["creators"][0]["name_type"] == "Personal" + assert response_data[2]["edition"] == "Editor Edition" + assert response_data[2]["first_page"] == "Editor First Page" + assert response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" + assert ( + response_data[2]["identifiers"][0]["metadata_scheme"] + == "Editor Metadata Scheme" + ) + assert response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" + assert response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" + assert response_data[2]["identifiers"][0]["type"] == "ARK" + assert response_data[2]["issue"] == "Editor Issue" + assert response_data[2]["last_page"] == "Editor Last Page" + assert response_data[2]["number_type"] == "Editor Number Type" + assert response_data[2]["number_value"] == "Editor Number Value" + assert response_data[2]["publication_year"] == 2013 + assert response_data[2]["publisher"] == "Editor Publisher" + assert response_data[2]["relation_type"] == "Editor Relation Type" + assert response_data[2]["titles"][0]["title"] == "Editor Title" + assert response_data[2]["titles"][0]["type"] == "Subtitle" + assert response_data[2]["type"] == "Editor Type" + assert response_data[2]["volume"] == "Editor Volume" + + assert admin_response_data[0]["contributors"][0]["name"] == "Ndafsdame" + assert admin_response_data[0]["contributors"][0]["contributor_type"] == "Con Type" + assert admin_response_data[0]["contributors"][0]["name_type"] == "Personal" + assert admin_response_data[0]["creators"][0]["name"] == "Name" + assert admin_response_data[0]["creators"][0]["name_type"] == "Personal" + assert admin_response_data[0]["edition"] == "Edition" + assert admin_response_data[0]["first_page"] == "First Page" + assert 
admin_response_data[0]["identifiers"][0]["identifier"] == "Identifier" + assert ( + admin_response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" + ) + assert admin_response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" + assert admin_response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" + assert admin_response_data[0]["identifiers"][0]["type"] == "ARK" + assert admin_response_data[0]["issue"] == "Issue" + assert admin_response_data[0]["last_page"] == "Last Page" + assert admin_response_data[0]["number_type"] == "Number Type" + assert admin_response_data[0]["number_value"] == "Number Value" + assert admin_response_data[0]["publication_year"] == 2013 + assert admin_response_data[0]["publisher"] == "Publisher" + assert admin_response_data[0]["relation_type"] == "Relation Type" + assert admin_response_data[0]["titles"][a_main_title_0]["title"] == "Title" + assert admin_response_data[0]["titles"][a_main_title_0]["type"] == "MainTitle" + assert admin_response_data[0]["titles"][a_sub_title_0]["title"] == "Title" + assert admin_response_data[0]["titles"][a_sub_title_0]["type"] == "Subtitle" + assert admin_response_data[0]["type"] == "Type" + assert admin_response_data[0]["volume"] == "Volume" + assert admin_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" + assert ( + admin_response_data[1]["contributors"][0]["contributor_type"] + == "Admin Con Type" + ) + assert admin_response_data[1]["contributors"][0]["name_type"] == "Personal" + assert admin_response_data[1]["creators"][0]["name"] == "Admin Name" + assert admin_response_data[1]["creators"][0]["name_type"] == "Personal" + assert admin_response_data[1]["edition"] == "Admin Edition" + assert admin_response_data[1]["first_page"] == "Admin First Page" + assert admin_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" + assert ( + admin_response_data[1]["identifiers"][0]["metadata_scheme"] + == "Admin Metadata Scheme" + ) + assert ( + 
admin_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" + ) + assert admin_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" + assert admin_response_data[1]["identifiers"][0]["type"] == "ARK" + assert admin_response_data[1]["issue"] == "Admin Issue" + assert admin_response_data[1]["last_page"] == "Admin Last Page" + assert admin_response_data[1]["number_type"] == "Admin Number Type" + assert admin_response_data[1]["number_value"] == "Admin Number Value" + assert admin_response_data[1]["publication_year"] == 2013 + assert admin_response_data[1]["publisher"] == "Admin Publisher" + assert admin_response_data[1]["relation_type"] == "Admin Relation Type" + assert admin_response_data[1]["titles"][0]["title"] == "Admin Title" + assert admin_response_data[1]["titles"][0]["type"] == "AlternativeTitle" + assert admin_response_data[1]["type"] == "Admin Type" + assert admin_response_data[1]["volume"] == "Admin Volume" + assert admin_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" + assert ( + admin_response_data[2]["contributors"][0]["contributor_type"] + == "Editor Con Type" + ) + assert admin_response_data[2]["contributors"][0]["name_type"] == "Personal" + assert admin_response_data[2]["creators"][0]["name"] == "Editor Name" + assert admin_response_data[2]["creators"][0]["name_type"] == "Personal" + assert admin_response_data[2]["edition"] == "Editor Edition" + assert admin_response_data[2]["first_page"] == "Editor First Page" + assert admin_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" + assert ( + admin_response_data[2]["identifiers"][0]["metadata_scheme"] + == "Editor Metadata Scheme" + ) + assert ( + admin_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" + ) + assert admin_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" + assert admin_response_data[2]["identifiers"][0]["type"] == "ARK" + assert admin_response_data[2]["issue"] == "Editor 
Issue" + assert admin_response_data[2]["last_page"] == "Editor Last Page" + assert admin_response_data[2]["number_type"] == "Editor Number Type" + assert admin_response_data[2]["number_value"] == "Editor Number Value" + assert admin_response_data[2]["publication_year"] == 2013 + assert admin_response_data[2]["publisher"] == "Editor Publisher" + assert admin_response_data[2]["relation_type"] == "Editor Relation Type" + assert admin_response_data[2]["titles"][0]["title"] == "Editor Title" + assert admin_response_data[2]["titles"][0]["type"] == "Subtitle" + assert admin_response_data[2]["type"] == "Editor Type" + assert admin_response_data[2]["volume"] == "Editor Volume" + + assert editor_response_data[0]["contributors"][0]["name"] == "Ndafsdame" + assert editor_response_data[0]["contributors"][0]["contributor_type"] == "Con Type" + assert editor_response_data[0]["contributors"][0]["name_type"] == "Personal" + assert editor_response_data[0]["creators"][0]["name"] == "Name" + assert editor_response_data[0]["creators"][0]["name_type"] == "Personal" + assert editor_response_data[0]["edition"] == "Edition" + assert editor_response_data[0]["first_page"] == "First Page" + assert editor_response_data[0]["identifiers"][0]["identifier"] == "Identifier" + assert ( + editor_response_data[0]["identifiers"][0]["metadata_scheme"] + == "Metadata Scheme" + ) + assert editor_response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" + assert editor_response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" + assert editor_response_data[0]["identifiers"][0]["type"] == "ARK" + assert editor_response_data[0]["issue"] == "Issue" + assert editor_response_data[0]["last_page"] == "Last Page" + assert editor_response_data[0]["number_type"] == "Number Type" + assert editor_response_data[0]["number_value"] == "Number Value" + assert editor_response_data[0]["publication_year"] == 2013 + assert editor_response_data[0]["publisher"] == "Publisher" + assert 
editor_response_data[0]["relation_type"] == "Relation Type" + assert editor_response_data[0]["titles"][e_main_title_0]["title"] == "Title" + assert editor_response_data[0]["titles"][e_main_title_0]["type"] == "MainTitle" + assert editor_response_data[0]["titles"][e_sub_title_0]["title"] == "Title" + assert editor_response_data[0]["titles"][e_sub_title_0]["type"] == "Subtitle" + assert editor_response_data[0]["type"] == "Type" + assert editor_response_data[0]["volume"] == "Volume" + assert editor_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" + assert ( + editor_response_data[1]["contributors"][0]["contributor_type"] + == "Admin Con Type" + ) + assert editor_response_data[1]["contributors"][0]["name_type"] == "Personal" + assert editor_response_data[1]["creators"][0]["name"] == "Admin Name" + assert editor_response_data[1]["creators"][0]["name_type"] == "Personal" + assert editor_response_data[1]["edition"] == "Admin Edition" + assert editor_response_data[1]["first_page"] == "Admin First Page" + assert editor_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" + assert ( + editor_response_data[1]["identifiers"][0]["metadata_scheme"] + == "Admin Metadata Scheme" + ) + assert ( + editor_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" + ) + assert editor_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" + assert editor_response_data[1]["identifiers"][0]["type"] == "ARK" + assert editor_response_data[1]["issue"] == "Admin Issue" + assert editor_response_data[1]["last_page"] == "Admin Last Page" + assert editor_response_data[1]["number_type"] == "Admin Number Type" + assert editor_response_data[1]["number_value"] == "Admin Number Value" + assert editor_response_data[1]["publication_year"] == 2013 + assert editor_response_data[1]["publisher"] == "Admin Publisher" + assert editor_response_data[1]["relation_type"] == "Admin Relation Type" + assert editor_response_data[1]["titles"][0]["title"] 
== "Admin Title" + assert editor_response_data[1]["titles"][0]["type"] == "AlternativeTitle" + assert editor_response_data[1]["type"] == "Admin Type" + assert editor_response_data[1]["volume"] == "Admin Volume" + assert editor_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" + assert ( + editor_response_data[2]["contributors"][0]["contributor_type"] + == "Editor Con Type" + ) + assert editor_response_data[2]["contributors"][0]["name_type"] == "Personal" + assert editor_response_data[2]["creators"][0]["name"] == "Editor Name" + assert editor_response_data[2]["creators"][0]["name_type"] == "Personal" + assert editor_response_data[2]["edition"] == "Editor Edition" + assert editor_response_data[2]["first_page"] == "Editor First Page" + assert ( + editor_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" + ) + assert ( + editor_response_data[2]["identifiers"][0]["metadata_scheme"] + == "Editor Metadata Scheme" + ) + assert ( + editor_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" + ) + assert ( + editor_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" + ) + assert editor_response_data[2]["identifiers"][0]["type"] == "ARK" + assert editor_response_data[2]["issue"] == "Editor Issue" + assert editor_response_data[2]["last_page"] == "Editor Last Page" + assert editor_response_data[2]["number_type"] == "Editor Number Type" + assert editor_response_data[2]["number_value"] == "Editor Number Value" + assert editor_response_data[2]["publication_year"] == 2013 + assert editor_response_data[2]["publisher"] == "Editor Publisher" + assert editor_response_data[2]["relation_type"] == "Editor Relation Type" + assert editor_response_data[2]["titles"][0]["title"] == "Editor Title" + assert editor_response_data[2]["titles"][0]["type"] == "Subtitle" + assert editor_response_data[2]["type"] == "Editor Type" + assert editor_response_data[2]["volume"] == "Editor Volume" + + assert 
viewer_response_data[0]["contributors"][0]["name"] == "Ndafsdame" + assert viewer_response_data[0]["contributors"][0]["contributor_type"] == "Con Type" + assert viewer_response_data[0]["contributors"][0]["name_type"] == "Personal" + assert viewer_response_data[0]["creators"][0]["name"] == "Name" + assert viewer_response_data[0]["creators"][0]["name_type"] == "Personal" + assert viewer_response_data[0]["edition"] == "Edition" + assert viewer_response_data[0]["first_page"] == "First Page" + assert viewer_response_data[0]["identifiers"][0]["identifier"] == "Identifier" + assert ( + viewer_response_data[0]["identifiers"][0]["metadata_scheme"] + == "Metadata Scheme" + ) + assert viewer_response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" + assert viewer_response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" + assert viewer_response_data[0]["identifiers"][0]["type"] == "ARK" + assert viewer_response_data[0]["issue"] == "Issue" + assert viewer_response_data[0]["last_page"] == "Last Page" + assert viewer_response_data[0]["number_type"] == "Number Type" + assert viewer_response_data[0]["number_value"] == "Number Value" + assert viewer_response_data[0]["publication_year"] == 2013 + assert viewer_response_data[0]["publisher"] == "Publisher" + assert viewer_response_data[0]["relation_type"] == "Relation Type" + assert viewer_response_data[0]["titles"][v_main_title_0]["title"] == "Title" + assert viewer_response_data[0]["titles"][v_main_title_0]["type"] == "MainTitle" + assert viewer_response_data[0]["titles"][v_sub_title_0]["title"] == "Title" + assert viewer_response_data[0]["titles"][v_sub_title_0]["type"] == "Subtitle" + assert viewer_response_data[0]["type"] == "Type" + assert viewer_response_data[0]["volume"] == "Volume" + assert viewer_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" + assert ( + viewer_response_data[1]["contributors"][0]["contributor_type"] + == "Admin Con Type" + ) + assert 
viewer_response_data[1]["contributors"][0]["name_type"] == "Personal" + assert viewer_response_data[1]["creators"][0]["name"] == "Admin Name" + assert viewer_response_data[1]["creators"][0]["name_type"] == "Personal" + assert viewer_response_data[1]["edition"] == "Admin Edition" + assert viewer_response_data[1]["first_page"] == "Admin First Page" + assert viewer_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" + assert ( + viewer_response_data[1]["identifiers"][0]["metadata_scheme"] + == "Admin Metadata Scheme" + ) + assert ( + viewer_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" + ) + assert viewer_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" + assert viewer_response_data[1]["identifiers"][0]["type"] == "ARK" + assert viewer_response_data[1]["issue"] == "Admin Issue" + assert viewer_response_data[1]["last_page"] == "Admin Last Page" + assert viewer_response_data[1]["number_type"] == "Admin Number Type" + assert viewer_response_data[1]["number_value"] == "Admin Number Value" + assert viewer_response_data[1]["publication_year"] == 2013 + assert viewer_response_data[1]["publisher"] == "Admin Publisher" + assert viewer_response_data[1]["relation_type"] == "Admin Relation Type" + assert viewer_response_data[1]["titles"][0]["title"] == "Admin Title" + assert viewer_response_data[1]["titles"][0]["type"] == "AlternativeTitle" + assert viewer_response_data[1]["type"] == "Admin Type" + assert viewer_response_data[1]["volume"] == "Admin Volume" + assert viewer_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" + assert ( + viewer_response_data[2]["contributors"][0]["contributor_type"] + == "Editor Con Type" + ) + assert viewer_response_data[2]["contributors"][0]["name_type"] == "Personal" + assert viewer_response_data[2]["creators"][0]["name"] == "Editor Name" + assert viewer_response_data[2]["creators"][0]["name_type"] == "Personal" + assert viewer_response_data[2]["edition"] == 
"Editor Edition" + assert viewer_response_data[2]["first_page"] == "Editor First Page" + assert ( + viewer_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" + ) + assert ( + viewer_response_data[2]["identifiers"][0]["metadata_scheme"] + == "Editor Metadata Scheme" + ) + assert ( + viewer_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" + ) + assert ( + viewer_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" + ) + assert viewer_response_data[2]["identifiers"][0]["type"] == "ARK" + assert viewer_response_data[2]["issue"] == "Editor Issue" + assert viewer_response_data[2]["last_page"] == "Editor Last Page" + assert viewer_response_data[2]["number_type"] == "Editor Number Type" + assert viewer_response_data[2]["number_value"] == "Editor Number Value" + assert viewer_response_data[2]["publication_year"] == 2013 + assert viewer_response_data[2]["publisher"] == "Editor Publisher" + assert viewer_response_data[2]["relation_type"] == "Editor Relation Type" + assert viewer_response_data[2]["titles"][0]["title"] == "Editor Title" + assert viewer_response_data[2]["titles"][0]["type"] == "Subtitle" + assert viewer_response_data[2]["type"] == "Editor Type" + assert viewer_response_data[2]["volume"] == "Editor Volume" -def test_delete_dataset_related_item_contributor_metadata(_logged_in_client): +def test_delete_dataset_related_item_contributor_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}' @@ -886,26 +3168,38 @@ def test_delete_dataset_related_item_contributor_metadata(_logged_in_client): Then check that the response is valid and deletes the dataset related item metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id related_item_id = pytest.global_dataset_related_item_id contributor_id = 
pytest.global_dataset_related_item_contributor_id + admin_con_id = pytest.global_dataset_related_item_contributor_id_admin + editor_con_id = pytest.global_dataset_related_item_contributor_id_editor + # pylint: disable=line-too-long + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{contributor_id}" + ) # pylint: disable=line-too-long response = _logged_in_client.delete( f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{contributor_id}" ) + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{admin_con_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{editor_con_id}" + ) + assert viewer_response.status_code == 403 assert response.status_code == 204 - - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" - ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)[0]["contributors"]) == 0 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_delete_dataset_related_item_creator_metadata(_logged_in_client): +def test_delete_dataset_related_item_creator_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}' @@ -913,26 +3207,38 @@ def test_delete_dataset_related_item_creator_metadata(_logged_in_client): Then check that the response is valid and deletes the dataset related item metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id related_item_id = 
pytest.global_dataset_related_item_id creator_id = pytest.global_dataset_related_item_creator_id + admin_creator_id = pytest.global_dataset_related_item_creator_id_admin + editor_creator_id = pytest.global_dataset_related_item_creator_id_editor + # pylint: disable=line-too-long + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{creator_id}" + ) # pylint: disable=line-too-long response = _logged_in_client.delete( f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{creator_id}" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{admin_creator_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{editor_creator_id}" ) - assert len(json.loads(response_get.data)[0]["creators"]) == 0 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_delete_dataset_related_item_identifier_metadata(_logged_in_client): +def test_delete_dataset_related_item_identifier_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}' @@ -940,27 +3246,38 @@ def test_delete_dataset_related_item_identifier_metadata(_logged_in_client): Then check that the response is valid and deletes the dataset related item metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id 
related_item_id = pytest.global_dataset_related_item_id identifier_id = pytest.global_dataset_related_item_identifier_id + admin_id_id = pytest.global_dataset_related_item_identifier_id_admin + editor_id_id = pytest.global_dataset_related_item_identifier_id_editor + # pylint: disable=line-too-long + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{identifier_id}" + ) # pylint: disable=line-too-long response = _logged_in_client.delete( f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{identifier_id}" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{admin_id_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{editor_id_id}" ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)[0]["identifiers"]) == 0 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_delete_dataset_related_item_title_metadata(_logged_in_client): +def test_delete_dataset_related_item_title_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}' @@ -968,41 +3285,42 @@ def test_delete_dataset_related_item_title_metadata(_logged_in_client): Then check that the response is valid and deletes the dataset related item metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # 
type: ignore dataset_id = pytest.global_dataset_id related_item_id = pytest.global_dataset_related_item_id - # title_id = pytest.global_dataset_related_item_title_id + main_title_id = pytest.global_dataset_related_item_main_title_id + sub_title_id = pytest.global_dataset_related_item_sub_title_id + admin_t_id = pytest.global_dataset_related_item_title_id_admin + editor_t_id = pytest.global_dataset_related_item_title_id_editor + # pylint: disable=line-too-long - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{main_title_id}" ) - - # titles_to_delete = [ - # # f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{title_id}" - # for i in json.loads(response_get.data)[0]["titles"] - # if i["type"] != "MainTitle" - # ] - - for i in json.loads(response_get.data)[0]["titles"]: - if i["type"] != "MainTitle": - t_id = i["id"] - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{t_id}" - ) - assert response.status_code == 204 - - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + main_response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{main_title_id}" ) - - assert response_get.status_code == 200 - assert ( - len(json.loads(response_get.data)[0]["titles"]) == 1 - and json.loads(response_get.data)[0]["titles"][0]["type"] == "MainTitle" + sub_response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{sub_title_id}" + ) + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{admin_t_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{editor_t_id}" ) + assert viewer_response.status_code == 403 + assert main_response.status_code == 403 # Main title cannot be deleted + assert sub_response.status_code == 204 # Main title cannot be deleted + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_delete_dataset_related_item_metadata(_logged_in_client): + +def test_delete_dataset_related_item_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}' @@ -1010,43 +3328,34 @@ def test_delete_dataset_related_item_metadata(_logged_in_client): Then check that the response is valid and deletes the dataset related item metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id related_item_id = pytest.global_dataset_related_item_id + admin_ri_id = pytest.global_dataset_related_item_id_admin + editor_ri_id = pytest.global_dataset_related_item_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}" ) - - assert response.status_code == 204 - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{admin_ri_id}" ) - - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 0 - - -# ------------------- RIGHTS METADATA 
------------------- # -def test_get_dataset_rights_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - rights metadata content - """ - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{editor_ri_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_post_dataset_rights_metadata(_logged_in_client): +# ------------------- RIGHTS METADATA ------------------- # +def test_post_dataset_rights_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study' @@ -1054,6 +3363,7 @@ def test_post_dataset_rights_metadata(_logged_in_client): Then check that the response is valid and creates the dataset rights metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -1068,6 +3378,8 @@ def test_post_dataset_rights_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -1078,53 +3390,189 @@ def test_post_dataset_rights_metadata(_logged_in_client): assert response_data[0]["rights"] == "Rights" assert response_data[0]["uri"] == "URI" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", + json=[ + { + "identifier": "Admin 
Identifier", + "identifier_scheme": "Admin Identifier Scheme", + "rights": "Admin Rights", + "uri": "Admin URI", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_rights_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "Admin Identifier" + assert admin_response_data[0]["identifier_scheme"] == "Admin Identifier Scheme" + assert admin_response_data[0]["rights"] == "Admin Rights" + assert admin_response_data[0]["uri"] == "Admin URI" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", + json=[ + { + "identifier": "Editor Identifier", + "identifier_scheme": "Editor Identifier Scheme", + "rights": "Editor Rights", + "uri": "Editor URI", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_rights_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["identifier"] == "Editor Identifier" + assert editor_response_data[0]["identifier_scheme"] == "Editor Identifier Scheme" + assert editor_response_data[0]["rights"] == "Editor Rights" + assert editor_response_data[0]["uri"] == "Editor URI" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", + json=[ + { + "identifier": "Viewer Identifier", + "identifier_scheme": "Viewer Identifier Scheme", + "rights": "Viewer Rights", + "uri": "Viewer URI", + } + ], + ) + + assert viewer_response.status_code == 403 -def test_delete_dataset_rights_metadata(_logged_in_client): + +def test_get_dataset_rights_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/rights' - endpoint is requested (DELETE) - Then check that the response is 
valid and deletes the dataset + When the '/study' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset rights metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - rights_id = pytest.global_dataset_rights_id - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" + ) + viewer_response = _viewer_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 0 + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -# ------------------- SUBJECTS METADATA ------------------- # -def test_get_dataset_subjects_metadata(_logged_in_client): + assert response_data[0]["identifier"] == "Identifier" + assert response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert response_data[0]["rights"] == "Rights" + assert response_data[0]["uri"] == "URI" + assert response_data[1]["identifier"] == "Admin Identifier" + assert response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" + assert response_data[1]["rights"] == "Admin Rights" + assert 
response_data[1]["uri"] == "Admin URI" + assert response_data[2]["identifier"] == "Editor Identifier" + assert response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" + assert response_data[2]["rights"] == "Editor Rights" + assert response_data[2]["uri"] == "Editor URI" + + assert admin_response_data[0]["identifier"] == "Identifier" + assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert admin_response_data[0]["rights"] == "Rights" + assert admin_response_data[0]["uri"] == "URI" + assert admin_response_data[1]["identifier"] == "Admin Identifier" + assert admin_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" + assert admin_response_data[1]["rights"] == "Admin Rights" + assert admin_response_data[1]["uri"] == "Admin URI" + assert admin_response_data[2]["identifier"] == "Editor Identifier" + assert admin_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" + assert admin_response_data[2]["rights"] == "Editor Rights" + assert admin_response_data[2]["uri"] == "Editor URI" + + assert editor_response_data[0]["identifier"] == "Identifier" + assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert editor_response_data[0]["rights"] == "Rights" + assert editor_response_data[0]["uri"] == "URI" + assert editor_response_data[1]["identifier"] == "Admin Identifier" + assert editor_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" + assert editor_response_data[1]["rights"] == "Admin Rights" + assert editor_response_data[1]["uri"] == "Admin URI" + assert editor_response_data[2]["identifier"] == "Editor Identifier" + assert editor_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" + assert editor_response_data[2]["rights"] == "Editor Rights" + assert editor_response_data[2]["uri"] == "Editor URI" + + assert viewer_response_data[0]["identifier"] == "Identifier" + assert viewer_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert 
viewer_response_data[0]["rights"] == "Rights" + assert viewer_response_data[0]["uri"] == "URI" + assert viewer_response_data[1]["identifier"] == "Admin Identifier" + assert viewer_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" + assert viewer_response_data[1]["rights"] == "Admin Rights" + assert viewer_response_data[1]["uri"] == "Admin URI" + assert viewer_response_data[2]["identifier"] == "Editor Identifier" + assert viewer_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" + assert viewer_response_data[2]["rights"] == "Editor Rights" + assert viewer_response_data[2]["uri"] == "Editor URI" + + +def test_delete_dataset_rights_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - subjects metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/rights' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + rights metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + rights_id = pytest.global_dataset_rights_id + a_rights_id = pytest.global_dataset_rights_id_admin + e_rights_id = pytest.global_dataset_rights_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{a_rights_id}" + ) + editor_response = _admin_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{e_rights_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_post_dataset_subjects_metadata(_logged_in_client): +# ------------------- SUBJECTS METADATA ------------------- # +def test_post_dataset_subjects_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' @@ -1132,6 +3580,7 @@ def test_post_dataset_subjects_metadata(_logged_in_client): Then check that the response is valid and creates the dataset subjects metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id @@ -1147,6 +3596,8 @@ def test_post_dataset_subjects_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -1156,54 +3607,127 @@ def test_post_dataset_subjects_metadata(_logged_in_client): assert response_data[0]["scheme_uri"] == "Scheme URI" assert response_data[0]["subject"] == "Subject" assert response_data[0]["value_uri"] == "Value URI" + assert response_data[0]["classification_code"] == "Classification Code" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", + json=[ + { + "classification_code": "Classification Code", + "scheme": "Admin Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Admin Value URI", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + 
pytest.global_dataset_subject_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["scheme"] == "Admin Scheme" + assert admin_response_data[0]["scheme_uri"] == "Scheme URI" + assert admin_response_data[0]["subject"] == "Subject" + assert admin_response_data[0]["value_uri"] == "Admin Value URI" + assert admin_response_data[0]["classification_code"] == "Classification Code" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", + json=[ + { + "classification_code": "Classification Code", + "scheme": "Editor Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Editor Value URI", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_subject_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["scheme"] == "Editor Scheme" + assert editor_response_data[0]["scheme_uri"] == "Scheme URI" + assert editor_response_data[0]["subject"] == "Subject" + assert editor_response_data[0]["value_uri"] == "Editor Value URI" + assert editor_response_data[0]["classification_code"] == "Classification Code" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", + json=[ + { + "classification_code": "Classification Code", + "scheme": "Viewer Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Viewer Value URI", + } + ], + ) + assert viewer_response.status_code == 403 -def test_delete_dataset_subject_metadata(_logged_in_client): + +def test_get_dataset_subjects_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - subject metadata content + When the 
'/study/{study_id}/dataset/{dataset_id}/metadata/subject' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + subjects metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - subject_id = pytest.global_dataset_subject_id - - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" - ) - - assert response.status_code == 204 - response_get = _logged_in_client.get( + response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/metadata/subject" ) - assert response_get.status_code == 200 - assert len(json.loads(response_get.data)) == 0 + assert response.status_code == 200 -# ------------------- TITLE METADATA ------------------- # -def test_get_dataset_title_metadata(_logged_in_client): +def test_delete_dataset_subject_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' + When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - title metadata content + subjects metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + subject_id = pytest.global_dataset_subject_id + admin_sub_id = pytest.global_dataset_subject_id_admin + editor_sub_id = pytest.global_dataset_subject_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" + ) + response = _logged_in_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{admin_sub_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{editor_sub_id}" ) - assert response.status_code == 200 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_post_dataset_title_metadata(_logged_in_client): +# ------------------- TITLE METADATA ------------------- # +def test_post_dataset_title_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' @@ -1211,23 +3735,268 @@ def test_post_dataset_title_metadata(_logged_in_client): Then check that the response is valid and creates the dataset title metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Title", "type": "Subtitle"}], + json=[{"title": "Owner Title", "type": "Subtitle"}], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_title_id = response_data[0]["id"] - assert response_data[0]["title"] == "Title" + assert response_data[0]["title"] == "Owner Title" assert response_data[0]["type"] == "Subtitle" + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title", + json=[{"title": "Admin Title", "type": "Subtitle"}], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + 
admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_title_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["title"] == "Admin Title" + assert admin_response_data[0]["type"] == "Subtitle" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title", + json=[{"title": "Editor Title", "type": "Subtitle"}], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_title_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["title"] == "Editor Title" + assert editor_response_data[0]["type"] == "Subtitle" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title", + json=[{"title": "Viewer Title", "type": "Subtitle"}], + ) + + assert viewer_response.status_code == 403 + + +def test_get_dataset_title_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + title metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + 
response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert len(response_data) == 4 + assert len(admin_response_data) == 4 + assert len(editor_response_data) == 4 + assert len(viewer_response_data) == 4 -def test_delete_dataset_title_metadata(_logged_in_client): + # search for maintitle index + # pylint: disable=line-too-long + main_title = next( + (index for (index, d) in enumerate(response_data) if d["type"] == "MainTitle"), + None, + ) + a_main_title = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["type"] == "MainTitle" + ), + None, + ) + e_main_title = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["type"] == "MainTitle" + ), + None, + ) + v_main_title = next( + ( + index + for (index, d) in enumerate(viewer_response_data) + if d["type"] == "MainTitle" + ), + None, + ) + # search for admin title index + admin_title = next( + ( + index + for (index, d) in enumerate(response_data) + if d["title"] == "Admin Title" + ), + None, + ) + a_admin_title = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["title"] == "Admin Title" + ), + None, + ) + e_admin_title = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["title"] == "Admin Title" + ), + None, + ) + v_admin_title = next( + ( + index + for (index, d) in enumerate(viewer_response_data) + if d["title"] == "Admin Title" + ), + None, + ) + + # search for editor title index + editor_title = next( + ( + index + for (index, d) in enumerate(response_data) + if d["title"] == "Editor Title" + ), + None, + ) + a_editor_title = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["title"] == "Editor Title" + ), + None, + ) + e_editor_title = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["title"] == 
"Editor Title" + ), + None, + ) + v_editor_title = next( + ( + index + for (index, d) in enumerate(viewer_response_data) + if d["title"] == "Editor Title" + ), + None, + ) + + # search for owner title index + own_title = next( + ( + index + for (index, d) in enumerate(response_data) + if d["title"] == "Owner Title" + ), + None, + ) + a_own_title = next( + ( + index + for (index, d) in enumerate(admin_response_data) + if d["title"] == "Owner Title" + ), + None, + ) + e_own_title = next( + ( + index + for (index, d) in enumerate(editor_response_data) + if d["title"] == "Owner Title" + ), + None, + ) + v_own_title = next( + ( + index + for (index, d) in enumerate(viewer_response_data) + if d["title"] == "Owner Title" + ), + None, + ) + + assert response_data[main_title]["title"] == "Dataset Title" + assert response_data[main_title]["type"] == "MainTitle" + assert response_data[own_title]["title"] == "Owner Title" + assert response_data[own_title]["type"] == "Subtitle" + assert response_data[admin_title]["title"] == "Admin Title" + assert response_data[admin_title]["type"] == "Subtitle" + assert response_data[editor_title]["title"] == "Editor Title" + assert response_data[editor_title]["type"] == "Subtitle" + + assert admin_response_data[a_main_title]["title"] == "Dataset Title" + assert admin_response_data[a_main_title]["type"] == "MainTitle" + assert admin_response_data[a_own_title]["title"] == "Owner Title" + assert admin_response_data[a_own_title]["type"] == "Subtitle" + assert admin_response_data[a_admin_title]["title"] == "Admin Title" + assert admin_response_data[a_admin_title]["type"] == "Subtitle" + assert admin_response_data[a_editor_title]["title"] == "Editor Title" + assert admin_response_data[a_editor_title]["type"] == "Subtitle" + + assert editor_response_data[e_main_title]["title"] == "Dataset Title" + assert editor_response_data[e_main_title]["type"] == "MainTitle" + assert editor_response_data[e_own_title]["title"] == "Owner Title" + assert 
editor_response_data[e_own_title]["type"] == "Subtitle" + assert editor_response_data[e_admin_title]["title"] == "Admin Title" + assert editor_response_data[e_admin_title]["type"] == "Subtitle" + assert editor_response_data[e_editor_title]["title"] == "Editor Title" + assert editor_response_data[e_editor_title]["type"] == "Subtitle" + + assert viewer_response_data[v_main_title]["title"] == "Dataset Title" + assert viewer_response_data[v_main_title]["type"] == "MainTitle" + assert viewer_response_data[v_own_title]["title"] == "Owner Title" + assert viewer_response_data[v_own_title]["type"] == "Subtitle" + assert viewer_response_data[v_admin_title]["title"] == "Admin Title" + assert viewer_response_data[v_admin_title]["type"] == "Subtitle" + assert viewer_response_data[v_editor_title]["title"] == "Editor Title" + assert viewer_response_data[v_editor_title]["type"] == "Subtitle" + + +def test_delete_dataset_title_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}' @@ -1235,21 +4004,27 @@ def test_delete_dataset_title_metadata(_logged_in_client): Then check that the response is valid and deletes the dataset title metadata content """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id title_id = pytest.global_dataset_title_id + admin_title_id = pytest.global_dataset_title_id_admin + editor_title_id = pytest.global_dataset_title_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" ) - - assert response.status_code == 204 - - response_get = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + admin_response = 
_admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{admin_title_id}" ) - assert response_get.status_code == 200 - assert ( - len(json.loads(response_get.data)) == 1 - and json.loads(response_get.data)[0]["type"] == "MainTitle" + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{editor_title_id}" ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index 77f71677..9d8b2618 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -2,16 +2,18 @@ """Tests for the Study Metadata API endpoints""" import json +from time import sleep import pytest # ------------------- ARM METADATA ------------------- # -def test_post_arm_metadata(_logged_in_client): +def test_post_arm_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/arm' endpoint is requested (POST) THEN check that the response is vaild and create a new arm """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( @@ -25,6 +27,8 @@ def test_post_arm_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -38,20 +42,83 @@ def test_post_arm_metadata(_logged_in_client): "intervention2", ] + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/arm", + json=[ + { + "label": "Admin Label", + "type": "Experimental", + "description": "Arm Description", + "intervention_list": ["intervention1", "intervention2"], + } + ], + ) + # Add a one second delay to 
prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_admin_arm_id_admin = admin_response_data["arms"][1]["id"] + + assert admin_response_data["arms"][1]["label"] == "Admin Label" + + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/arm", + json=[ + { + "label": "Editor Label", + "type": "Experimental", + "description": "Arm Description", + "intervention_list": ["intervention1", "intervention2"], + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_editor_arm_id_editor = editor_response_data["arms"][2]["id"] + + assert editor_response_data["arms"][2]["label"] == "Editor Label" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/arm", + json=[ + { + "label": "Viewer Label", + "type": "Experimental", + "description": "Arm Description", + "intervention_list": ["intervention1", "intervention2"], + } + ], + ) + + assert viewer_response.status_code == 403 -def test_get_arm_metadata(_logged_in_client): + +def test_get_arm_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/arm/metadata' endpoint is requested (GET) THEN check that the response is valid and retrieves the arm metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.get(f"/study/{study_id}/metadata/arm") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/arm") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/arm") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/arm") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 response_data = 
json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + assert response_data["arms"][0]["id"] == pytest.global_arm_id assert response_data["arms"][0]["label"] == "Label1" assert response_data["arms"][0]["type"] == "Experimental" assert response_data["arms"][0]["description"] == "Arm Description" @@ -59,30 +126,135 @@ def test_get_arm_metadata(_logged_in_client): "intervention1", "intervention2", ] + assert response_data["arms"][1]["id"] == pytest.global_admin_arm_id_admin + assert response_data["arms"][1]["label"] == "Admin Label" + assert response_data["arms"][1]["type"] == "Experimental" + assert response_data["arms"][1]["description"] == "Arm Description" + assert response_data["arms"][1]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + assert response_data["arms"][2]["id"] == pytest.global_editor_arm_id_editor + assert response_data["arms"][2]["label"] == "Editor Label" + assert response_data["arms"][2]["type"] == "Experimental" + assert response_data["arms"][2]["description"] == "Arm Description" + assert response_data["arms"][2]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + + assert admin_response_data["arms"][0]["id"] == pytest.global_arm_id + assert admin_response_data["arms"][0]["label"] == "Label1" + assert admin_response_data["arms"][0]["type"] == "Experimental" + assert admin_response_data["arms"][0]["description"] == "Arm Description" + assert admin_response_data["arms"][0]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + assert admin_response_data["arms"][1]["id"] == pytest.global_admin_arm_id_admin + assert admin_response_data["arms"][1]["label"] == "Admin Label" + assert admin_response_data["arms"][1]["type"] == "Experimental" + assert admin_response_data["arms"][1]["description"] == "Arm Description" + assert 
admin_response_data["arms"][1]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + assert admin_response_data["arms"][2]["id"] == pytest.global_editor_arm_id_editor + assert admin_response_data["arms"][2]["label"] == "Editor Label" + assert admin_response_data["arms"][2]["type"] == "Experimental" + assert admin_response_data["arms"][2]["description"] == "Arm Description" + assert admin_response_data["arms"][2]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + + assert editor_response_data["arms"][0]["id"] == pytest.global_arm_id + assert editor_response_data["arms"][0]["label"] == "Label1" + assert editor_response_data["arms"][0]["type"] == "Experimental" + assert editor_response_data["arms"][0]["description"] == "Arm Description" + assert editor_response_data["arms"][0]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + assert editor_response_data["arms"][1]["id"] == pytest.global_admin_arm_id_admin + assert editor_response_data["arms"][1]["label"] == "Admin Label" + assert editor_response_data["arms"][1]["type"] == "Experimental" + assert editor_response_data["arms"][1]["description"] == "Arm Description" + assert editor_response_data["arms"][1]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + assert editor_response_data["arms"][2]["id"] == pytest.global_editor_arm_id_editor + assert editor_response_data["arms"][2]["label"] == "Editor Label" + assert editor_response_data["arms"][2]["type"] == "Experimental" + assert editor_response_data["arms"][2]["description"] == "Arm Description" + assert editor_response_data["arms"][2]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + + assert viewer_response_data["arms"][0]["id"] == pytest.global_arm_id + assert viewer_response_data["arms"][0]["label"] == "Label1" + assert viewer_response_data["arms"][0]["type"] == "Experimental" + assert viewer_response_data["arms"][0]["description"] == "Arm Description" + assert 
viewer_response_data["arms"][0]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + assert viewer_response_data["arms"][1]["id"] == pytest.global_admin_arm_id_admin + assert viewer_response_data["arms"][1]["label"] == "Admin Label" + assert viewer_response_data["arms"][1]["type"] == "Experimental" + assert viewer_response_data["arms"][1]["description"] == "Arm Description" + assert viewer_response_data["arms"][1]["intervention_list"] == [ + "intervention1", + "intervention2", + ] + assert viewer_response_data["arms"][2]["id"] == pytest.global_editor_arm_id_editor + assert viewer_response_data["arms"][2]["label"] == "Editor Label" + assert viewer_response_data["arms"][2]["type"] == "Experimental" + assert viewer_response_data["arms"][2]["description"] == "Arm Description" + assert viewer_response_data["arms"][2]["intervention_list"] == [ + "intervention1", + "intervention2", + ] -def test_delete_arm_metadata(_logged_in_client): +def test_delete_arm_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID and arm ID WHEN the '/study/{study_id}/arm/metadata' endpoint is requested (DELETE) THEN check that the response is valid and deletes the arm metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore arm_id = pytest.global_arm_id + admin_arm_id = pytest.global_admin_arm_id_admin + editor_arm_id = pytest.global_editor_arm_id_editor + + # Verify viewer cannot delete arm + viewer_response = _viewer_client.delete(f"/study/{study_id}/metadata/arm/{arm_id}") response = _logged_in_client.delete(f"/study/{study_id}/metadata/arm/{arm_id}") + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/arm/{admin_arm_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/arm/{editor_arm_id}" + ) + assert viewer_response.status_code == 403 assert response.status_code == 204 - response_get = 
_logged_in_client.get(f"/study/{study_id}/metadata/arm") - assert len(json.loads(response_get.data)["arms"]) == 0 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- IPD METADATA ------------------- # -def test_post_available_ipd_metadata(_logged_in_client): +def test_post_available_ipd_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (POST) THEN check that the response is vaild and new IPD was created """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( @@ -96,6 +268,8 @@ def test_post_available_ipd_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -106,46 +280,181 @@ def test_post_available_ipd_metadata(_logged_in_client): assert response_data[0]["url"] == "google.com" assert response_data[0]["comment"] == "comment1" + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/available-ipd", + json=[ + { + "identifier": "identifier2", + "type": "Clinical Study Report", + "url": "google.com", + "comment": "comment2", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_available_ipd_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "identifier2" + assert admin_response_data[0]["type"] == "Clinical Study Report" + assert admin_response_data[0]["url"] == "google.com" + assert admin_response_data[0]["comment"] == "comment2" + + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/available-ipd", + json=[ + { + "identifier": "identifier3", + 
"type": "Clinical Study Report", + "url": "google.com", + "comment": "comment3", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_available_ipd_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["identifier"] == "identifier3" + assert editor_response_data[0]["type"] == "Clinical Study Report" + assert editor_response_data[0]["url"] == "google.com" + assert editor_response_data[0]["comment"] == "comment3" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/available-ipd", + json=[ + { + "identifier": "identifier4", + "type": "Clinical Study Report", + "url": "google.com", + "comment": "comment4", + } + ], + ) -def test_get_available_ipd_metadata(_logged_in_client): + assert viewer_response.status_code == 403 + + +def test_get_available_ipd_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (GET) THEN check that the response is vaild and retrieves the available IPD(s) """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.get(f"/study/{study_id}/metadata/available-ipd") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/available-ipd") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/available-ipd") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/available-ipd") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -def 
test_delete_available_ipd_metadata(_logged_in_client): + assert response_data[0]["identifier"] == "identifier1" + assert response_data[0]["type"] == "Clinical Study Report" + assert response_data[0]["url"] == "google.com" + assert response_data[0]["comment"] == "comment1" + assert response_data[1]["identifier"] == "identifier2" + assert response_data[1]["type"] == "Clinical Study Report" + assert response_data[1]["url"] == "google.com" + assert response_data[1]["comment"] == "comment2" + assert response_data[2]["identifier"] == "identifier3" + assert response_data[2]["type"] == "Clinical Study Report" + assert response_data[2]["url"] == "google.com" + assert response_data[2]["comment"] == "comment3" + + assert admin_response_data[0]["identifier"] == "identifier1" + assert admin_response_data[0]["type"] == "Clinical Study Report" + assert admin_response_data[0]["url"] == "google.com" + assert admin_response_data[0]["comment"] == "comment1" + assert admin_response_data[1]["identifier"] == "identifier2" + assert admin_response_data[1]["type"] == "Clinical Study Report" + assert admin_response_data[1]["url"] == "google.com" + assert admin_response_data[1]["comment"] == "comment2" + assert admin_response_data[2]["identifier"] == "identifier3" + assert admin_response_data[2]["type"] == "Clinical Study Report" + assert admin_response_data[2]["url"] == "google.com" + assert admin_response_data[2]["comment"] == "comment3" + + assert editor_response_data[0]["identifier"] == "identifier1" + assert editor_response_data[0]["type"] == "Clinical Study Report" + assert editor_response_data[0]["url"] == "google.com" + assert editor_response_data[0]["comment"] == "comment1" + assert editor_response_data[1]["identifier"] == "identifier2" + assert editor_response_data[1]["type"] == "Clinical Study Report" + assert editor_response_data[1]["url"] == "google.com" + assert editor_response_data[1]["comment"] == "comment2" + assert editor_response_data[2]["identifier"] == "identifier3" + 
assert editor_response_data[2]["type"] == "Clinical Study Report" + assert editor_response_data[2]["url"] == "google.com" + assert editor_response_data[2]["comment"] == "comment3" + + assert viewer_response_data[0]["identifier"] == "identifier1" + assert viewer_response_data[0]["type"] == "Clinical Study Report" + assert viewer_response_data[0]["url"] == "google.com" + assert viewer_response_data[0]["comment"] == "comment1" + assert viewer_response_data[1]["identifier"] == "identifier2" + assert viewer_response_data[1]["type"] == "Clinical Study Report" + assert viewer_response_data[1]["url"] == "google.com" + assert viewer_response_data[1]["comment"] == "comment2" + assert viewer_response_data[2]["identifier"] == "identifier3" + assert viewer_response_data[2]["type"] == "Clinical Study Report" + assert viewer_response_data[2]["url"] == "google.com" + assert viewer_response_data[2]["comment"] == "comment3" + + +def test_delete_available_ipd_metadata(clients): """ Given a Flask application configured for testing and a study ID and available IPD ID WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (DELETE) THEN check that the response is vaild and deletes the available IPD """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore available_ipd_id = pytest.global_available_ipd_id + admin_avail_ipd = pytest.global_available_ipd_id_admin + editor_avail_ipd = pytest.global_available_ipd_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/available-ipd/{available_ipd_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/metadata/available-ipd/{available_ipd_id}" ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/available-ipd/{admin_avail_ipd}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/available-ipd/{editor_avail_ipd}" + ) + assert viewer_response.status_code == 403 
assert response.status_code == 204 - response_get = _logged_in_client.get(f"/study/{study_id}/metadata/available-ipd") - - assert len(json.loads(response_get.data)) == 0 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- CENTRAL CONTACT METADATA ------------------- # -def test_post_cc_metadata(_logged_in_client): +def test_post_cc_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/central-contact' endpoint is requested (POST) THEN check that the response is valid and creates the central contact metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( @@ -161,6 +470,8 @@ def test_post_cc_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -174,19 +485,84 @@ def test_post_cc_metadata(_logged_in_client): assert response_data[0]["email_address"] == "sample@gmail.com" assert response_data[0]["central_contact"] is True + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/central-contact", + json=[ + { + "name": "admin-central-contact", + "affiliation": "affiliation", + "role": "role", + "phone": "808", + "phone_ext": "909", + "email_address": "sample1@gmail.com", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_admin_cc_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["name"] == "admin-central-contact" + assert admin_response_data[0]["affiliation"] == "affiliation" + assert admin_response_data[0]["role"] is None + assert admin_response_data[0]["phone"] == "808" + assert 
admin_response_data[0]["phone_ext"] == "909" + assert admin_response_data[0]["email_address"] == "sample1@gmail.com" + assert admin_response_data[0]["central_contact"] is True + + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/central-contact", + json=[ + { + "name": "editor-central-contact", + "affiliation": "affiliation", + "role": "role", + "phone": "808", + "phone_ext": "909", + "email_address": "sample2@gmail.com", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_editor_cc_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["name"] == "editor-central-contact" + assert editor_response_data[0]["affiliation"] == "affiliation" + assert editor_response_data[0]["role"] is None + assert editor_response_data[0]["phone"] == "808" + assert editor_response_data[0]["phone_ext"] == "909" + assert editor_response_data[0]["email_address"] == "sample2@gmail.com" + assert editor_response_data[0]["central_contact"] is True -def test_get_cc_metadata(_logged_in_client): + +def test_get_cc_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/central-contact' endpoint is requested (GET) THEN check that the response is valid and retrieves the central contact metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.get(f"/study/{study_id}/metadata/central-contact") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/central-contact") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/central-contact") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/central-contact") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert 
viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) assert response_data[0]["name"] == "central-contact" assert response_data[0]["affiliation"] == "affiliation" @@ -196,8 +572,32 @@ def test_get_cc_metadata(_logged_in_client): assert response_data[0]["email_address"] == "sample@gmail.com" assert response_data[0]["central_contact"] is True - -def test_delete_cc_metadata(_logged_in_client): + assert admin_response_data[0]["name"] == "central-contact" + assert admin_response_data[0]["affiliation"] == "affiliation" + assert admin_response_data[0]["role"] is None + assert admin_response_data[0]["phone"] == "808" + assert admin_response_data[0]["phone_ext"] == "909" + assert admin_response_data[0]["email_address"] == "sample@gmail.com" + assert admin_response_data[0]["central_contact"] is True + + assert editor_response_data[0]["name"] == "central-contact" + assert editor_response_data[0]["affiliation"] == "affiliation" + assert editor_response_data[0]["role"] is None + assert editor_response_data[0]["phone"] == "808" + assert editor_response_data[0]["phone_ext"] == "909" + assert editor_response_data[0]["email_address"] == "sample@gmail.com" + assert editor_response_data[0]["central_contact"] is True + + assert viewer_response_data[0]["name"] == "central-contact" + assert viewer_response_data[0]["affiliation"] == "affiliation" + assert viewer_response_data[0]["role"] is None + assert viewer_response_data[0]["phone"] == "808" + assert viewer_response_data[0]["phone_ext"] == "909" + assert viewer_response_data[0]["email_address"] == "sample@gmail.com" + assert viewer_response_data[0]["central_contact"] is True + + +def test_delete_cc_metadata(clients): """ Given a Flask application configured for testing and a study ID and central contact ID @@ -205,40 +605,40 @@ def 
test_delete_cc_metadata(_logged_in_client): endpoint is requested (DELETE) THEN check that the response is valid and deletes the central contact metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore central_contact_id = pytest.global_cc_id + admin_cc_id = pytest.global_admin_cc_id_admin + editor_cc_id = pytest.global_editor_cc_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/central-contact/{central_contact_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/metadata/central-contact/{central_contact_id}" ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/central-contact/{admin_cc_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/central-contact/{editor_cc_id}" + ) + assert viewer_response.status_code == 403 assert response.status_code == 204 - response_get = _logged_in_client.get(f"/study/{study_id}/metadata/central-contact") - - assert len(json.loads(response_get.data)) == 0 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- COLLABORATORS METADATA ------------------- # -def test_get_collaborators_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (GET) - THEN check that the response is valid and retrieves the collaborators metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/collaborators") - - assert response.status_code == 200 - - -def test_put_collaborators_metadata(_logged_in_client): +def test_put_collaborators_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (POST) THEN check that the 
response is valid and creates the collaborators metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -253,27 +653,78 @@ def test_put_collaborators_metadata(_logged_in_client): assert response_data[0] == "collaborator1123" + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/collaborators", + json=[ + "admin-collaborator1123", + ], + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) -# ------------------- CONDITIONS METADATA ------------------- # -def test_get_conditions_metadata(_logged_in_client): + assert admin_response_data[0] == "admin-collaborator1123" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/collaborators", + json=[ + "editor-collaborator1123", + ], + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data[0] == "editor-collaborator1123" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/metadata/collaborators", + json=[ + "viewer-collaborator1123", + ], + ) + + assert viewer_response.status_code == 403 + + +def test_get_collaborators_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) - THEN check that the response is valid and retrieves the conditions metadata + WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (GET) + THEN check that the response is valid and retrieves the collaborators metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/conditions") + response = _logged_in_client.get(f"/study/{study_id}/metadata/collaborators") + admin_response = 
_admin_client.get(f"/study/{study_id}/metadata/collaborators") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/collaborators") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/collaborators") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data[0] == "editor-collaborator1123" + assert admin_response_data[0] == "editor-collaborator1123" + assert editor_response_data[0] == "editor-collaborator1123" + assert viewer_response_data[0] == "editor-collaborator1123" -def test_put_conditions_metadata(_logged_in_client): +# ------------------- CONDITIONS METADATA ------------------- # +def test_put_conditions_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (POST) THEN check that the response is valid and creates the conditions metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -294,27 +745,96 @@ def test_put_conditions_metadata(_logged_in_client): assert response_data[2] == "keywords string" assert response_data[3] == "size string" + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/conditions", + json=[ + "true", + "admin-conditions string", + "admin-keywords string", + "admin-size string", + ], + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) -# ------------------- DESCRIPTION METADATA ------------------- # -def test_get_description_metadata(_logged_in_client): + assert 
admin_response_data[0] == "true" + assert admin_response_data[1] == "admin-conditions string" + assert admin_response_data[2] == "admin-keywords string" + assert admin_response_data[3] == "admin-size string" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/conditions", + json=[ + "true", + "editor-conditions string", + "editor-keywords string", + "editor-size string", + ], + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data[0] == "true" + assert editor_response_data[1] == "editor-conditions string" + assert editor_response_data[2] == "editor-keywords string" + assert editor_response_data[3] == "editor-size string" + + +def test_get_conditions_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/description' endpoint is requested (GET) - THEN check that the response is valid and retrieves the description metadata + WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) + THEN check that the response is valid and retrieves the conditions metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/description") + response = _logged_in_client.get(f"/study/{study_id}/metadata/conditions") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/conditions") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/conditions") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/conditions") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = 
json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data[0] == "true" + assert response_data[1] == "editor-conditions string" + assert response_data[2] == "editor-keywords string" + assert response_data[3] == "editor-size string" + assert admin_response_data[0] == "true" + assert admin_response_data[1] == "editor-conditions string" + assert admin_response_data[2] == "editor-keywords string" + assert admin_response_data[3] == "editor-size string" -def test_put_description_metadata(_logged_in_client): + assert editor_response_data[0] == "true" + assert editor_response_data[1] == "editor-conditions string" + assert editor_response_data[2] == "editor-keywords string" + assert editor_response_data[3] == "editor-size string" + + assert viewer_response_data[0] == "true" + assert viewer_response_data[1] == "editor-conditions string" + assert viewer_response_data[2] == "editor-keywords string" + assert viewer_response_data[3] == "editor-size string" + + +# ------------------- DESCRIPTION METADATA ------------------- # +def test_put_description_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/description' endpoint is requested (POST) THEN check that the response is valid and creates the description metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -331,27 +851,90 @@ def test_put_description_metadata(_logged_in_client): assert response_data["brief_summary"] == "brief_summary" assert response_data["detailed_description"] == "detailed_description" + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/description", + json={ + "brief_summary": "admin-brief_summary", + "detailed_description": "admin-detailed_description", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = 
json.loads(admin_response.data) -# ------------------- DESIGN METADATA ------------------- # -def test_get_design_metadata(_logged_in_client): + assert admin_response_data["brief_summary"] == "admin-brief_summary" + assert admin_response_data["detailed_description"] == "admin-detailed_description" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/description", + json={ + "brief_summary": "editor-brief_summary", + "detailed_description": "editor-detailed_description", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["brief_summary"] == "editor-brief_summary" + assert editor_response_data["detailed_description"] == "editor-detailed_description" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/metadata/description", + json={ + "brief_summary": "viewer-brief_summary", + "detailed_description": "viewer-detailed_description", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_description_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/design' endpoint is requested (GET) - THEN check that the response is valid and retrieves the design metadata + WHEN the '/study/{study_id}/metadata/description' endpoint is requested (GET) + THEN check that the response is valid and retrieves the description metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/design") + response = _logged_in_client.get(f"/study/{study_id}/metadata/description") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/description") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/description") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/description") assert 
response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["brief_summary"] == "editor-brief_summary" + assert response_data["detailed_description"] == "editor-detailed_description" + + assert admin_response_data["brief_summary"] == "editor-brief_summary" + assert admin_response_data["detailed_description"] == "editor-detailed_description" + + assert editor_response_data["brief_summary"] == "editor-brief_summary" + assert editor_response_data["detailed_description"] == "editor-detailed_description" + assert viewer_response_data["brief_summary"] == "editor-brief_summary" + assert viewer_response_data["detailed_description"] == "editor-detailed_description" -def test_put_design_metadata(_logged_in_client): + +# ------------------- DESIGN METADATA ------------------- # +def test_put_design_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/design' endpoint is requested (PUT) THEN check that the response is valid and creates the design metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -403,27 +986,267 @@ def test_put_design_metadata(_logged_in_client): assert response_data["target_duration"] == "rewrwe" assert response_data["number_groups_cohorts"] == 1 - -# ------------------- ELIGIBILITY METADATA ------------------- # -def test_get_eligibility_metadata(_logged_in_client): + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/design", + json={ + "design_allocation": "admin-dfasdfasd", + "study_type": 
"Interventional", + "design_intervention_model": "Treatment", + "design_intervention_model_description": "dfadf", + "design_primary_purpose": "Parallel Assignment", + "design_masking": "Double", + "design_masking_description": "tewsfdasf", + "design_who_masked_list": ["Participant", "Care Provider"], + "phase_list": ["N/A"], + "enrollment_count": 3, + "enrollment_type": "Actual", + "number_arms": 2, + "design_observational_model_list": ["Cohort", "Case-Control"], + "design_time_perspective_list": ["Other"], + "bio_spec_retention": "None Retained", + "bio_spec_description": "dfasdf", + "target_duration": "rewrwe", + "number_groups_cohorts": 1, + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data["design_allocation"] == "admin-dfasdfasd" + assert admin_response_data["study_type"] == "Interventional" + assert admin_response_data["design_intervention_model"] == "Treatment" + assert admin_response_data["design_intervention_model_description"] == "dfadf" + assert admin_response_data["design_primary_purpose"] == "Parallel Assignment" + assert admin_response_data["design_masking"] == "Double" + assert admin_response_data["design_masking_description"] == "tewsfdasf" + assert admin_response_data["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert admin_response_data["phase_list"] == ["N/A"] + assert admin_response_data["enrollment_count"] == 3 + assert admin_response_data["enrollment_type"] == "Actual" + assert admin_response_data["number_arms"] == 2 + assert admin_response_data["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert admin_response_data["design_time_perspective_list"] == ["Other"] + assert admin_response_data["bio_spec_retention"] == "None Retained" + assert admin_response_data["bio_spec_description"] == "dfasdf" + assert admin_response_data["target_duration"] == "rewrwe" + assert 
admin_response_data["number_groups_cohorts"] == 1 + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/design", + json={ + "design_allocation": "editor-dfasdfasd", + "study_type": "Interventional", + "design_intervention_model": "Treatment", + "design_intervention_model_description": "dfadf", + "design_primary_purpose": "Parallel Assignment", + "design_masking": "Double", + "design_masking_description": "tewsfdasf", + "design_who_masked_list": ["Participant", "Care Provider"], + "phase_list": ["N/A"], + "enrollment_count": 3, + "enrollment_type": "Actual", + "number_arms": 2, + "design_observational_model_list": ["Cohort", "Case-Control"], + "design_time_perspective_list": ["Other"], + "bio_spec_retention": "None Retained", + "bio_spec_description": "dfasdf", + "target_duration": "rewrwe", + "number_groups_cohorts": 1, + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["design_allocation"] == "editor-dfasdfasd" + assert editor_response_data["study_type"] == "Interventional" + assert editor_response_data["design_intervention_model"] == "Treatment" + assert editor_response_data["design_intervention_model_description"] == "dfadf" + assert editor_response_data["design_primary_purpose"] == "Parallel Assignment" + assert editor_response_data["design_masking"] == "Double" + assert editor_response_data["design_masking_description"] == "tewsfdasf" + assert editor_response_data["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert editor_response_data["phase_list"] == ["N/A"] + assert editor_response_data["enrollment_count"] == 3 + assert editor_response_data["enrollment_type"] == "Actual" + assert editor_response_data["number_arms"] == 2 + assert editor_response_data["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert editor_response_data["design_time_perspective_list"] == ["Other"] + assert 
editor_response_data["bio_spec_retention"] == "None Retained" + assert editor_response_data["bio_spec_description"] == "dfasdf" + assert editor_response_data["target_duration"] == "rewrwe" + assert editor_response_data["number_groups_cohorts"] == 1 + + viewer_response = _viewer_client.put( + f"/study/{study_id}/metadata/design", + json={ + "design_allocation": "viewer-dfasdfasd", + "study_type": "Interventional", + "design_intervention_model": "Treatment", + "design_intervention_model_description": "dfadf", + "design_primary_purpose": "Parallel Assignment", + "design_masking": "Double", + "design_masking_description": "tewsfdasf", + "design_who_masked_list": ["Participant", "Care Provider"], + "phase_list": ["N/A"], + "enrollment_count": 3, + "enrollment_type": "Actual", + "number_arms": 2, + "design_observational_model_list": ["Cohort", "Case-Control"], + "design_time_perspective_list": ["Other"], + "bio_spec_retention": "None Retained", + "bio_spec_description": "dfasdf", + "target_duration": "rewrwe", + "number_groups_cohorts": 1, + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_design_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/eligibility' endpoint is requested (GET) - THEN check that the response is valid and retrieves the eligibility metadata + WHEN the '/study/{study_id}/metadata/design' endpoint is requested (GET) + THEN check that the response is valid and retrieves the design metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/eligibility") + response = _logged_in_client.get(f"/study/{study_id}/metadata/design") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/design") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/design") + viewer_response = 
_viewer_client.get(f"/study/{study_id}/metadata/design") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + assert response_data["design_allocation"] == "editor-dfasdfasd" + assert response_data["study_type"] == "Interventional" + assert response_data["design_intervention_model"] == "Treatment" + assert response_data["design_intervention_model_description"] == "dfadf" + assert response_data["design_primary_purpose"] == "Parallel Assignment" + assert response_data["design_masking"] == "Double" + assert response_data["design_masking_description"] == "tewsfdasf" + assert response_data["design_who_masked_list"] == ["Participant", "Care Provider"] + assert response_data["phase_list"] == ["N/A"] + assert response_data["enrollment_count"] == 3 + assert response_data["enrollment_type"] == "Actual" + assert response_data["number_arms"] == 2 + assert response_data["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert response_data["design_time_perspective_list"] == ["Other"] + assert response_data["bio_spec_retention"] == "None Retained" + assert response_data["bio_spec_description"] == "dfasdf" + assert response_data["target_duration"] == "rewrwe" + assert response_data["number_groups_cohorts"] == 1 -def test_put_eligibility_metadata(_logged_in_client): + assert admin_response_data["design_allocation"] == "editor-dfasdfasd" + assert admin_response_data["study_type"] == "Interventional" + assert admin_response_data["design_intervention_model"] == "Treatment" + assert admin_response_data["design_intervention_model_description"] == "dfadf" + assert admin_response_data["design_primary_purpose"] == "Parallel Assignment" + 
assert admin_response_data["design_masking"] == "Double" + assert admin_response_data["design_masking_description"] == "tewsfdasf" + assert admin_response_data["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert admin_response_data["phase_list"] == ["N/A"] + assert admin_response_data["enrollment_count"] == 3 + assert admin_response_data["enrollment_type"] == "Actual" + assert admin_response_data["number_arms"] == 2 + assert admin_response_data["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert admin_response_data["design_time_perspective_list"] == ["Other"] + assert admin_response_data["bio_spec_retention"] == "None Retained" + assert admin_response_data["bio_spec_description"] == "dfasdf" + assert admin_response_data["target_duration"] == "rewrwe" + assert admin_response_data["number_groups_cohorts"] == 1 + + assert editor_response_data["design_allocation"] == "editor-dfasdfasd" + assert editor_response_data["study_type"] == "Interventional" + assert editor_response_data["design_intervention_model"] == "Treatment" + assert editor_response_data["design_intervention_model_description"] == "dfadf" + assert editor_response_data["design_primary_purpose"] == "Parallel Assignment" + assert editor_response_data["design_masking"] == "Double" + assert editor_response_data["design_masking_description"] == "tewsfdasf" + assert editor_response_data["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert editor_response_data["phase_list"] == ["N/A"] + assert editor_response_data["enrollment_count"] == 3 + assert editor_response_data["enrollment_type"] == "Actual" + assert editor_response_data["number_arms"] == 2 + assert editor_response_data["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert editor_response_data["design_time_perspective_list"] == ["Other"] + assert editor_response_data["bio_spec_retention"] == "None Retained" + assert 
editor_response_data["bio_spec_description"] == "dfasdf" + assert editor_response_data["target_duration"] == "rewrwe" + assert editor_response_data["number_groups_cohorts"] == 1 + + assert viewer_response_data["design_allocation"] == "editor-dfasdfasd" + assert viewer_response_data["study_type"] == "Interventional" + assert viewer_response_data["design_intervention_model"] == "Treatment" + assert viewer_response_data["design_intervention_model_description"] == "dfadf" + assert viewer_response_data["design_primary_purpose"] == "Parallel Assignment" + assert viewer_response_data["design_masking"] == "Double" + assert viewer_response_data["design_masking_description"] == "tewsfdasf" + assert viewer_response_data["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert viewer_response_data["phase_list"] == ["N/A"] + assert viewer_response_data["enrollment_count"] == 3 + assert viewer_response_data["enrollment_type"] == "Actual" + assert viewer_response_data["number_arms"] == 2 + assert viewer_response_data["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert viewer_response_data["design_time_perspective_list"] == ["Other"] + assert viewer_response_data["bio_spec_retention"] == "None Retained" + assert viewer_response_data["bio_spec_description"] == "dfasdf" + assert viewer_response_data["target_duration"] == "rewrwe" + assert viewer_response_data["number_groups_cohorts"] == 1 + + +# ------------------- ELIGIBILITY METADATA ------------------- # +def test_put_eligibility_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/eligibility' endpoint is requested (PUT) THEN check that the response is valid and updates the eligibility metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -460,27 +1283,180 @@ def 
test_put_eligibility_metadata(_logged_in_client): assert response_data["study_population"] == "study_population" assert response_data["sampling_method"] == "Probability Sample" + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/eligibility", + json={ + "gender": "All", + "gender_based": "Yes", + "gender_description": "admin-none", + "minimum_age_value": 18, + "maximum_age_value": 61, + "minimum_age_unit": "1", + "maximum_age_unit": "2", + "healthy_volunteers": "Yes", + "inclusion_criteria": ["tests"], + "exclusion_criteria": ["Probability Sample"], + "study_population": "study_population", + "sampling_method": "Probability Sample", + }, + ) -# ------------------- IDENTIFICATION METADATA ------------------- # -def test_get_identification_metadata(_logged_in_client): + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data["gender"] == "All" + assert admin_response_data["gender_based"] == "Yes" + assert admin_response_data["gender_description"] == "admin-none" + assert admin_response_data["minimum_age_value"] == 18 + assert admin_response_data["maximum_age_value"] == 61 + assert admin_response_data["minimum_age_unit"] == "1" + assert admin_response_data["maximum_age_unit"] == "2" + assert admin_response_data["healthy_volunteers"] == "Yes" + assert admin_response_data["inclusion_criteria"] == ["tests"] + assert admin_response_data["exclusion_criteria"] == ["Probability Sample"] + assert admin_response_data["study_population"] == "study_population" + assert admin_response_data["sampling_method"] == "Probability Sample" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/eligibility", + json={ + "gender": "All", + "gender_based": "Yes", + "gender_description": "editor-none", + "minimum_age_value": 18, + "maximum_age_value": 61, + "minimum_age_unit": "1", + "maximum_age_unit": "2", + "healthy_volunteers": "Yes", + "inclusion_criteria": ["tests"], + 
"exclusion_criteria": ["Probability Sample"], + "study_population": "study_population", + "sampling_method": "Probability Sample", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["gender"] == "All" + assert editor_response_data["gender_based"] == "Yes" + assert editor_response_data["gender_description"] == "editor-none" + assert editor_response_data["minimum_age_value"] == 18 + assert editor_response_data["maximum_age_value"] == 61 + assert editor_response_data["minimum_age_unit"] == "1" + assert editor_response_data["maximum_age_unit"] == "2" + assert editor_response_data["healthy_volunteers"] == "Yes" + assert editor_response_data["inclusion_criteria"] == ["tests"] + assert editor_response_data["exclusion_criteria"] == ["Probability Sample"] + assert editor_response_data["study_population"] == "study_population" + assert editor_response_data["sampling_method"] == "Probability Sample" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/metadata/eligibility", + json={ + "gender": "All", + "gender_based": "Yes", + "gender_description": "viewer-none", + "minimum_age_value": 18, + "maximum_age_value": 61, + "minimum_age_unit": "1", + "maximum_age_unit": "2", + "healthy_volunteers": "Yes", + "inclusion_criteria": ["tests"], + "exclusion_criteria": ["Probability Sample"], + "study_population": "study_population", + "sampling_method": "Probability Sample", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_eligibility_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) - THEN check that the response is valid and retrieves the identification metadata + WHEN the '/study/{study_id}/metadata/eligibility' endpoint is requested (GET) + THEN check that the response is valid and retrieves the eligibility metadata """ + 
_logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/identification") + response = _logged_in_client.get(f"/study/{study_id}/metadata/eligibility") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/eligibility") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/eligibility") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/eligibility") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["gender"] == "All" + assert response_data["gender_based"] == "Yes" + assert response_data["gender_description"] == "editor-none" + assert response_data["minimum_age_value"] == 18 + assert response_data["maximum_age_value"] == 61 + assert response_data["minimum_age_unit"] == "1" + assert response_data["maximum_age_unit"] == "2" + assert response_data["healthy_volunteers"] == "Yes" + assert response_data["inclusion_criteria"] == ["tests"] + assert response_data["exclusion_criteria"] == ["Probability Sample"] + assert response_data["study_population"] == "study_population" + assert response_data["sampling_method"] == "Probability Sample" + + assert admin_response_data["gender"] == "All" + assert admin_response_data["gender_based"] == "Yes" + assert admin_response_data["gender_description"] == "editor-none" + assert admin_response_data["minimum_age_value"] == 18 + assert admin_response_data["maximum_age_value"] == 61 + assert admin_response_data["minimum_age_unit"] == "1" + assert admin_response_data["maximum_age_unit"] == "2" + assert 
admin_response_data["healthy_volunteers"] == "Yes" + assert admin_response_data["inclusion_criteria"] == ["tests"] + assert admin_response_data["exclusion_criteria"] == ["Probability Sample"] + assert admin_response_data["study_population"] == "study_population" + assert admin_response_data["sampling_method"] == "Probability Sample" + + assert editor_response_data["gender"] == "All" + assert editor_response_data["gender_based"] == "Yes" + assert editor_response_data["gender_description"] == "editor-none" + assert editor_response_data["minimum_age_value"] == 18 + assert editor_response_data["maximum_age_value"] == 61 + assert editor_response_data["minimum_age_unit"] == "1" + assert editor_response_data["maximum_age_unit"] == "2" + assert editor_response_data["healthy_volunteers"] == "Yes" + assert editor_response_data["inclusion_criteria"] == ["tests"] + assert editor_response_data["exclusion_criteria"] == ["Probability Sample"] + assert editor_response_data["study_population"] == "study_population" + assert editor_response_data["sampling_method"] == "Probability Sample" + + assert viewer_response_data["gender"] == "All" + assert viewer_response_data["gender_based"] == "Yes" + assert viewer_response_data["gender_description"] == "editor-none" + assert viewer_response_data["minimum_age_value"] == 18 + assert viewer_response_data["maximum_age_value"] == 61 + assert viewer_response_data["minimum_age_unit"] == "1" + assert viewer_response_data["maximum_age_unit"] == "2" + assert viewer_response_data["healthy_volunteers"] == "Yes" + assert viewer_response_data["inclusion_criteria"] == ["tests"] + assert viewer_response_data["exclusion_criteria"] == ["Probability Sample"] + assert viewer_response_data["study_population"] == "study_population" + assert viewer_response_data["sampling_method"] == "Probability Sample" -def test_post_identification_metadata(_logged_in_client): +# ------------------- IDENTIFICATION METADATA ------------------- # +def 
test_post_identification_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (POST) THEN check that the response is valid and creates the identification metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( @@ -502,6 +1478,8 @@ def test_post_identification_metadata(_logged_in_client): ], }, ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -516,110 +1494,448 @@ def test_post_identification_metadata(_logged_in_client): assert response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" assert response_data["secondary"][0]["identifier_link"] == "link" + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/identification", + json={ + "primary": { + "identifier": "admin-first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_identification_id_admin = admin_response_data["secondary"][1]["id"] + + assert admin_response_data["primary"]["identifier"] == "admin-first" + assert admin_response_data["primary"]["identifier_type"] == "test" + assert admin_response_data["primary"]["identifier_domain"] == "domain" + assert admin_response_data["primary"]["identifier_link"] == "link" + assert admin_response_data["secondary"][1]["identifier"] == "test" + assert admin_response_data["secondary"][1]["identifier_type"] == "test" + assert 
admin_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" + assert admin_response_data["secondary"][1]["identifier_link"] == "link" + + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/identification", + json={ + "primary": { + "identifier": "editor-first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_identification_id_editor = editor_response_data["secondary"][2]["id"] + + assert editor_response_data["primary"]["identifier"] == "editor-first" + assert editor_response_data["primary"]["identifier_type"] == "test" + assert editor_response_data["primary"]["identifier_domain"] == "domain" + assert editor_response_data["primary"]["identifier_link"] == "link" + assert editor_response_data["secondary"][2]["identifier"] == "test" + assert editor_response_data["secondary"][2]["identifier_type"] == "test" + assert editor_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" + assert editor_response_data["secondary"][2]["identifier_link"] == "link" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/identification", + json={ + "primary": { + "identifier": "viewer-first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_identification_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) + THEN 
check that the response is valid and retrieves the identification metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.get(f"/study/{study_id}/metadata/identification") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/identification") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/identification") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/identification") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 -def test_delete_identification_metadata(_logged_in_client): + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["primary"]["identifier"] == "editor-first" + assert response_data["primary"]["identifier_type"] == "test" + assert response_data["primary"]["identifier_domain"] == "domain" + assert response_data["primary"]["identifier_link"] == "link" + assert response_data["secondary"][0]["identifier"] == "test" + assert response_data["secondary"][0]["identifier_type"] == "test" + assert response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" + assert response_data["secondary"][0]["identifier_link"] == "link" + assert response_data["secondary"][1]["identifier"] == "test" + assert response_data["secondary"][1]["identifier_type"] == "test" + assert response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" + assert response_data["secondary"][1]["identifier_link"] == "link" + assert response_data["secondary"][2]["identifier"] == "test" + assert response_data["secondary"][2]["identifier_type"] == "test" + assert 
response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" + assert response_data["secondary"][2]["identifier_link"] == "link" + + assert admin_response_data["primary"]["identifier"] == "editor-first" + assert admin_response_data["primary"]["identifier_type"] == "test" + assert admin_response_data["primary"]["identifier_domain"] == "domain" + assert admin_response_data["primary"]["identifier_link"] == "link" + assert admin_response_data["secondary"][0]["identifier"] == "test" + assert admin_response_data["secondary"][0]["identifier_type"] == "test" + assert admin_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" + assert admin_response_data["secondary"][0]["identifier_link"] == "link" + assert admin_response_data["secondary"][1]["identifier"] == "test" + assert admin_response_data["secondary"][1]["identifier_type"] == "test" + assert admin_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" + assert admin_response_data["secondary"][1]["identifier_link"] == "link" + assert admin_response_data["secondary"][2]["identifier"] == "test" + assert admin_response_data["secondary"][2]["identifier_type"] == "test" + assert admin_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" + assert admin_response_data["secondary"][2]["identifier_link"] == "link" + + assert editor_response_data["primary"]["identifier"] == "editor-first" + assert editor_response_data["primary"]["identifier_type"] == "test" + assert editor_response_data["primary"]["identifier_domain"] == "domain" + assert editor_response_data["primary"]["identifier_link"] == "link" + assert editor_response_data["secondary"][0]["identifier"] == "test" + assert editor_response_data["secondary"][0]["identifier_type"] == "test" + assert editor_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" + assert editor_response_data["secondary"][0]["identifier_link"] == "link" + assert editor_response_data["secondary"][1]["identifier"] == "test" + assert 
editor_response_data["secondary"][1]["identifier_type"] == "test" + assert editor_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" + assert editor_response_data["secondary"][1]["identifier_link"] == "link" + assert editor_response_data["secondary"][2]["identifier"] == "test" + assert editor_response_data["secondary"][2]["identifier_type"] == "test" + assert editor_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" + assert editor_response_data["secondary"][2]["identifier_link"] == "link" + + assert viewer_response_data["primary"]["identifier"] == "editor-first" + assert viewer_response_data["primary"]["identifier_type"] == "test" + assert viewer_response_data["primary"]["identifier_domain"] == "domain" + assert viewer_response_data["primary"]["identifier_link"] == "link" + assert viewer_response_data["secondary"][0]["identifier"] == "test" + assert viewer_response_data["secondary"][0]["identifier_type"] == "test" + assert viewer_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" + assert viewer_response_data["secondary"][0]["identifier_link"] == "link" + assert viewer_response_data["secondary"][1]["identifier"] == "test" + assert viewer_response_data["secondary"][1]["identifier_type"] == "test" + assert viewer_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" + assert viewer_response_data["secondary"][1]["identifier_link"] == "link" + assert viewer_response_data["secondary"][2]["identifier"] == "test" + assert viewer_response_data["secondary"][2]["identifier_type"] == "test" + assert viewer_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" + assert viewer_response_data["secondary"][2]["identifier_link"] == "link" + + +def test_delete_identification_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) THEN check that the response is valid and retrieves the 
identification metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore identification_id = pytest.global_identification_id + admin_identification_id = pytest.global_identification_id_admin + editor_identification_id = pytest.global_identification_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/identification/{identification_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/metadata/identification/{identification_id}" ) - assert response.status_code == 204 + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/identification/{admin_identification_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/identification/{editor_identification_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- INTERVENTION METADATA ------------------- # +def test_post_intervention_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (POST) + THEN check that the response is valid and creates the intervention metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.post( + f"/study/{study_id}/metadata/intervention", + json=[ + { + "type": "Device", + "name": "name test", + "description": "desc", + "arm_group_label_list": ["test", "one"], + "other_name_list": ["uhh", "yes"], + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert response.status_code == 201 + response_data = json.loads(response.data) + pytest.global_intervention_id = response_data[0]["id"] + + assert 
response_data[0]["type"] == "Device" + assert response_data[0]["name"] == "name test" + assert response_data[0]["description"] == "desc" + assert response_data[0]["arm_group_label_list"] == ["test", "one"] + assert response_data[0]["other_name_list"] == ["uhh", "yes"] + + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/intervention", + json=[ + { + "type": "Device", + "name": "admin-name test", + "description": "desc", + "arm_group_label_list": ["test", "one"], + "other_name_list": ["uhh", "yes"], + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_intervention_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["type"] == "Device" + assert admin_response_data[0]["name"] == "admin-name test" + assert admin_response_data[0]["description"] == "desc" + assert admin_response_data[0]["arm_group_label_list"] == ["test", "one"] + assert admin_response_data[0]["other_name_list"] == ["uhh", "yes"] + + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/intervention", + json=[ + { + "type": "Device", + "name": "editor-name test", + "description": "desc", + "arm_group_label_list": ["test", "one"], + "other_name_list": ["uhh", "yes"], + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_intervention_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["type"] == "Device" + assert editor_response_data[0]["name"] == "editor-name test" + assert editor_response_data[0]["description"] == "desc" + assert editor_response_data[0]["arm_group_label_list"] == ["test", "one"] + assert editor_response_data[0]["other_name_list"] == ["uhh", "yes"] + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/intervention", + json=[ + { + "type": "Device", + "name": 
"viewer-name test", + "description": "desc", + "arm_group_label_list": ["test", "one"], + "other_name_list": ["uhh", "yes"], + } + ], + ) - response_get = _logged_in_client.get(f"/study/{study_id}/metadata/identification") - # print(response_get.data) - assert len(json.loads(response_get.data)["secondary"]) == 0 + assert viewer_response.status_code == 403 -# ------------------- INTERVENTION METADATA ------------------- # -def test_get_intervention_metadata(_logged_in_client): +def test_get_intervention_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (GET) THEN check that the response is valid and retrieves the intervention metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.get(f"/study/{study_id}/metadata/intervention") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/intervention") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/intervention") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/intervention") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 - -def test_post_intervention_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (POST) - THEN check that the response is valid and creates the intervention metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/intervention", - json=[ - { - "type": "Device", - "name": "name test", - "description": "desc", - "arm_group_label_list": ["test", "one"], - "other_name_list": ["uhh", "yes"], - } - ], - ) - - 
assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_intervention_id = response_data[0]["id"] + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) assert response_data[0]["type"] == "Device" assert response_data[0]["name"] == "name test" assert response_data[0]["description"] == "desc" assert response_data[0]["arm_group_label_list"] == ["test", "one"] assert response_data[0]["other_name_list"] == ["uhh", "yes"] - - -def test_delete_intervention_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and link ID - WHEN the '/study/{study_id}/metadata/intervention/{intervention_id}' endpoint is requested (DELETE) - THEN check that the response is valid and deletes the link metadata - """ + assert response_data[1]["type"] == "Device" + assert response_data[1]["name"] == "admin-name test" + assert response_data[1]["description"] == "desc" + assert response_data[1]["arm_group_label_list"] == ["test", "one"] + assert response_data[1]["other_name_list"] == ["uhh", "yes"] + assert response_data[2]["type"] == "Device" + assert response_data[2]["name"] == "editor-name test" + assert response_data[2]["description"] == "desc" + assert response_data[2]["arm_group_label_list"] == ["test", "one"] + assert response_data[2]["other_name_list"] == ["uhh", "yes"] + + assert admin_response_data[0]["type"] == "Device" + assert admin_response_data[0]["name"] == "name test" + assert admin_response_data[0]["description"] == "desc" + assert admin_response_data[0]["arm_group_label_list"] == ["test", "one"] + assert admin_response_data[0]["other_name_list"] == ["uhh", "yes"] + assert admin_response_data[1]["type"] == "Device" + assert admin_response_data[1]["name"] == "admin-name test" + assert admin_response_data[1]["description"] == "desc" + assert 
admin_response_data[1]["arm_group_label_list"] == ["test", "one"] + assert admin_response_data[1]["other_name_list"] == ["uhh", "yes"] + assert admin_response_data[2]["type"] == "Device" + assert admin_response_data[2]["name"] == "editor-name test" + assert admin_response_data[2]["description"] == "desc" + assert admin_response_data[2]["arm_group_label_list"] == ["test", "one"] + assert admin_response_data[2]["other_name_list"] == ["uhh", "yes"] + + assert editor_response_data[0]["type"] == "Device" + assert editor_response_data[0]["name"] == "name test" + assert editor_response_data[0]["description"] == "desc" + assert editor_response_data[0]["arm_group_label_list"] == ["test", "one"] + assert editor_response_data[0]["other_name_list"] == ["uhh", "yes"] + assert editor_response_data[1]["type"] == "Device" + assert editor_response_data[1]["name"] == "admin-name test" + assert editor_response_data[1]["description"] == "desc" + assert editor_response_data[1]["arm_group_label_list"] == ["test", "one"] + assert editor_response_data[1]["other_name_list"] == ["uhh", "yes"] + assert editor_response_data[2]["type"] == "Device" + assert editor_response_data[2]["name"] == "editor-name test" + assert editor_response_data[2]["description"] == "desc" + assert editor_response_data[2]["arm_group_label_list"] == ["test", "one"] + assert editor_response_data[2]["other_name_list"] == ["uhh", "yes"] + + assert viewer_response_data[0]["type"] == "Device" + assert viewer_response_data[0]["name"] == "name test" + assert viewer_response_data[0]["description"] == "desc" + assert viewer_response_data[0]["arm_group_label_list"] == ["test", "one"] + assert viewer_response_data[0]["other_name_list"] == ["uhh", "yes"] + assert viewer_response_data[1]["type"] == "Device" + assert viewer_response_data[1]["name"] == "admin-name test" + assert viewer_response_data[1]["description"] == "desc" + assert viewer_response_data[1]["arm_group_label_list"] == ["test", "one"] + assert 
viewer_response_data[1]["other_name_list"] == ["uhh", "yes"] + assert viewer_response_data[2]["type"] == "Device" + assert viewer_response_data[2]["name"] == "editor-name test" + assert viewer_response_data[2]["description"] == "desc" + assert viewer_response_data[2]["arm_group_label_list"] == ["test", "one"] + assert viewer_response_data[2]["other_name_list"] == ["uhh", "yes"] + + +def test_delete_intervention_metadata(clients): + """ + Given a Flask application configured for testing, study ID, dataset ID and intervention ID + WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (DELETE) + THEN check that the response is valid and deletes the intervention metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore intervention_id = pytest.global_intervention_id + a_intervention_id = pytest.global_intervention_id_admin + e_intervention_id = pytest.global_intervention_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/intervention/{intervention_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/metadata/intervention/{intervention_id}" ) - assert response.status_code == 204 - response_get = _logged_in_client.get(f"/study/{study_id}/metadata/intervention") + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/intervention/{a_intervention_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/intervention/{e_intervention_id}" + ) - assert len(json.loads(response_get.data)) == 0 + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- IPD SHARING METADATA ------------------- # -def test_get_ipdsharing_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - WHEN the 
'/study/{study_id}/metadata/ipdsharing' endpoint is requested (GET) - THEN check that the response is valid and retrieves the ipdsharing metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/ipdsharing") - - assert response.status_code == 200 - - -def test_put_ipdsharing_metadata(_logged_in_client): +def test_put_ipdsharing_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/ipdsharing' endpoint is requested (PUT) THEN check that the response is valid and updates the ipdsharing metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -647,33 +1963,152 @@ def test_put_ipdsharing_metadata(_logged_in_client): assert response_data["ipd_sharing_access_criteria"] == "Study Protocol" assert response_data["ipd_sharing_url"] == "1" + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/ipdsharing", + json={ + "ipd_sharing": "Yes", + "ipd_sharing_description": "admin-yes", + "ipd_sharing_info_type_list": ["Study Protocol", "Analytical Code"], + "ipd_sharing_time_frame": "uh", + "ipd_sharing_access_criteria": "Study Protocol", + "ipd_sharing_url": "1", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) -# ------------------- LINK METADATA ------------------- # -def test_get_link_metadata(_logged_in_client): + assert admin_response_data["ipd_sharing"] == "Yes" + assert admin_response_data["ipd_sharing_description"] == "admin-yes" + assert admin_response_data["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + assert admin_response_data["ipd_sharing_time_frame"] == "uh" + assert admin_response_data["ipd_sharing_access_criteria"] == "Study Protocol" + assert admin_response_data["ipd_sharing_url"] == 
"1" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/ipdsharing", + json={ + "ipd_sharing": "Yes", + "ipd_sharing_description": "editor-yes", + "ipd_sharing_info_type_list": ["Study Protocol", "Analytical Code"], + "ipd_sharing_time_frame": "uh", + "ipd_sharing_access_criteria": "Study Protocol", + "ipd_sharing_url": "1", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["ipd_sharing"] == "Yes" + assert editor_response_data["ipd_sharing_description"] == "editor-yes" + assert editor_response_data["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + assert editor_response_data["ipd_sharing_time_frame"] == "uh" + assert editor_response_data["ipd_sharing_access_criteria"] == "Study Protocol" + assert editor_response_data["ipd_sharing_url"] == "1" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/metadata/ipdsharing", + json={ + "ipd_sharing": "Yes", + "ipd_sharing_description": "viewer-yes", + "ipd_sharing_info_type_list": ["Study Protocol", "Analytical Code"], + "ipd_sharing_time_frame": "uh", + "ipd_sharing_access_criteria": "Study Protocol", + "ipd_sharing_url": "1", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_ipdsharing_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/link' endpoint is requested (GET) - THEN check that the response is valid and retrieves the link metadata + WHEN the '/study/{study_id}/metadata/ipdsharing' endpoint is requested (GET) + THEN check that the response is valid and retrieves the ipdsharing metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/link") + response = 
_logged_in_client.get(f"/study/{study_id}/metadata/ipdsharing") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/ipdsharing") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/ipdsharing") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/ipdsharing") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["ipd_sharing"] == "Yes" + assert response_data["ipd_sharing_description"] == "editor-yes" + assert response_data["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + assert response_data["ipd_sharing_time_frame"] == "uh" + assert response_data["ipd_sharing_access_criteria"] == "Study Protocol" + assert response_data["ipd_sharing_url"] == "1" + + assert admin_response_data["ipd_sharing"] == "Yes" + assert admin_response_data["ipd_sharing_description"] == "editor-yes" + assert admin_response_data["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + assert admin_response_data["ipd_sharing_time_frame"] == "uh" + assert admin_response_data["ipd_sharing_access_criteria"] == "Study Protocol" + assert admin_response_data["ipd_sharing_url"] == "1" + + assert editor_response_data["ipd_sharing"] == "Yes" + assert editor_response_data["ipd_sharing_description"] == "editor-yes" + assert editor_response_data["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + assert editor_response_data["ipd_sharing_time_frame"] == "uh" + assert editor_response_data["ipd_sharing_access_criteria"] == "Study Protocol" + assert editor_response_data["ipd_sharing_url"] == "1" + + assert 
viewer_response_data["ipd_sharing"] == "Yes" + assert viewer_response_data["ipd_sharing_description"] == "editor-yes" + assert viewer_response_data["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + assert viewer_response_data["ipd_sharing_time_frame"] == "uh" + assert viewer_response_data["ipd_sharing_access_criteria"] == "Study Protocol" + assert viewer_response_data["ipd_sharing_url"] == "1" -def test_post_link_metadata(_logged_in_client): +# ------------------- LINK METADATA ------------------- # +def test_post_link_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/link' endpoint is requested (POST) THEN check that the response is valid and creates the link metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( f"/study/{study_id}/metadata/link", json=[{"url": "google.com", "title": "google link"}], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -682,45 +2117,130 @@ def test_post_link_metadata(_logged_in_client): assert response_data[0]["url"] == "google.com" assert response_data[0]["title"] == "google link" + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/link", + json=[{"url": "admin-google.com", "title": "admin-google link"}], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) -def test_delete_link_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID and link ID - WHEN the '/study/{study_id}/metadata/link/{link_id}' endpoint is requested (DELETE) - THEN check that the response is valid and deletes the link metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore - link_id = pytest.global_link_id + assert 
admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_link_id_admin = admin_response_data[0]["id"] - response = _logged_in_client.delete(f"/study/{study_id}/metadata/link/{link_id}") + assert admin_response_data[0]["url"] == "admin-google.com" + assert admin_response_data[0]["title"] == "admin-google link" - assert response.status_code == 204 + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/link", + json=[{"url": "editor-google.com", "title": "editor-google link"}], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_link_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["url"] == "editor-google.com" + assert editor_response_data[0]["title"] == "editor-google link" - response_get = _logged_in_client.get(f"/study/{study_id}/metadata/link") + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/link", + json=[{"url": "viewer-google.com", "title": "viewer-google link"}], + ) - assert len(json.loads(response_get.data)) == 0 + assert viewer_response.status_code == 403 -# ------------------- LOCATION METADATA ------------------- # -def test_get_location_metadata(_logged_in_client): +def test_get_link_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/location' endpoint is requested (GET) - THEN check that the response is valid and retrieves the location metadata + WHEN the '/study/{study_id}/metadata/link' endpoint is requested (GET) + THEN check that the response is valid and retrieves the link metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/location") + response = _logged_in_client.get(f"/study/{study_id}/metadata/link") + admin_response = 
_admin_client.get(f"/study/{study_id}/metadata/link") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/link") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/link") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data[0]["url"] == "google.com" + assert response_data[0]["title"] == "google link" + assert response_data[1]["url"] == "admin-google.com" + assert response_data[1]["title"] == "admin-google link" + assert response_data[2]["url"] == "editor-google.com" + assert response_data[2]["title"] == "editor-google link" + + assert admin_response_data[0]["url"] == "google.com" + assert admin_response_data[0]["title"] == "google link" + assert admin_response_data[1]["url"] == "admin-google.com" + assert admin_response_data[1]["title"] == "admin-google link" + assert admin_response_data[2]["url"] == "editor-google.com" + assert admin_response_data[2]["title"] == "editor-google link" + + assert editor_response_data[0]["url"] == "google.com" + assert editor_response_data[0]["title"] == "google link" + assert editor_response_data[1]["url"] == "admin-google.com" + assert editor_response_data[1]["title"] == "admin-google link" + assert editor_response_data[2]["url"] == "editor-google.com" + assert editor_response_data[2]["title"] == "editor-google link" + + assert viewer_response_data[0]["url"] == "google.com" + assert viewer_response_data[0]["title"] == "google link" + assert viewer_response_data[1]["url"] == "admin-google.com" + assert viewer_response_data[1]["title"] == "admin-google link" + assert viewer_response_data[2]["url"] == "editor-google.com" + assert 
viewer_response_data[2]["title"] == "editor-google link" + + +def test_delete_link_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and link ID + WHEN the '/study/{study_id}/metadata/link/{link_id}' endpoint is requested (DELETE) + THEN check that the response is valid and deletes the link metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + link_id = pytest.global_link_id + admin_link_id = pytest.global_link_id_admin + editor_link_id = pytest.global_link_id_editor + + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/link/{link_id}" + ) + response = _logged_in_client.delete(f"/study/{study_id}/metadata/link/{link_id}") + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/link/{admin_link_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/link/{editor_link_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_post_location_metadata(_logged_in_client): +# ------------------- LOCATION METADATA ------------------- # +def test_post_location_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/location' endpoint is requested (POST) THEN check that the response is valid and creates the location metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( @@ -736,6 +2256,8 @@ def test_post_location_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -748,106 +2270,341 @@ def 
test_post_location_metadata(_logged_in_client): assert response_data[0]["zip"] == "test" assert response_data[0]["country"] == "yes" + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/location", + json=[ + { + "facility": "test", + "status": "Withdrawn", + "city": "city", + "state": "ca", + "zip": "test", + "country": "yes", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_location_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["facility"] == "test" + assert admin_response_data[0]["status"] == "Withdrawn" + assert admin_response_data[0]["city"] == "city" + assert admin_response_data[0]["state"] == "ca" + assert admin_response_data[0]["zip"] == "test" + assert admin_response_data[0]["country"] == "yes" + + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/location", + json=[ + { + "facility": "editor test", + "status": "Withdrawn", + "city": "city", + "state": "ca", + "zip": "test", + "country": "yes", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_location_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["facility"] == "editor test" + assert editor_response_data[0]["status"] == "Withdrawn" + assert editor_response_data[0]["city"] == "city" + assert editor_response_data[0]["state"] == "ca" + assert editor_response_data[0]["zip"] == "test" + assert editor_response_data[0]["country"] == "yes" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/location", + json=[ + { + "facility": "viewer test", + "status": "Withdrawn", + "city": "city", + "state": "ca", + "zip": "test", + "country": "yes", + } + ], + ) + + assert viewer_response.status_code == 403 + + +def test_get_location_metadata(clients): + """ + Given a Flask 
application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/location' endpoint is requested (GET) + THEN check that the response is valid and retrieves the location metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.get(f"/study/{study_id}/metadata/location") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/location") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/location") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/location") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -def test_delete_location_metadata(_logged_in_client): + assert response_data[0]["facility"] == "test" + assert response_data[0]["status"] == "Withdrawn" + assert response_data[0]["city"] == "city" + assert response_data[0]["state"] == "ca" + assert response_data[0]["zip"] == "test" + assert response_data[0]["country"] == "yes" + assert response_data[1]["facility"] == "test" + assert response_data[1]["status"] == "Withdrawn" + assert response_data[1]["city"] == "city" + assert response_data[1]["state"] == "ca" + assert response_data[1]["zip"] == "test" + assert response_data[1]["country"] == "yes" + assert response_data[2]["facility"] == "editor test" + assert response_data[2]["status"] == "Withdrawn" + assert response_data[2]["city"] == "city" + assert response_data[2]["state"] == "ca" + assert response_data[2]["zip"] == "test" + assert response_data[2]["country"] == "yes" + + assert admin_response_data[0]["facility"] == "test" + assert 
admin_response_data[0]["status"] == "Withdrawn" + assert admin_response_data[0]["city"] == "city" + assert admin_response_data[0]["state"] == "ca" + assert admin_response_data[0]["zip"] == "test" + assert admin_response_data[0]["country"] == "yes" + assert admin_response_data[1]["facility"] == "test" + assert admin_response_data[1]["status"] == "Withdrawn" + assert admin_response_data[1]["city"] == "city" + assert admin_response_data[1]["state"] == "ca" + assert admin_response_data[1]["zip"] == "test" + assert admin_response_data[1]["country"] == "yes" + assert admin_response_data[2]["facility"] == "editor test" + assert admin_response_data[2]["status"] == "Withdrawn" + assert admin_response_data[2]["city"] == "city" + assert admin_response_data[2]["state"] == "ca" + assert admin_response_data[2]["zip"] == "test" + assert admin_response_data[2]["country"] == "yes" + + assert editor_response_data[0]["facility"] == "test" + assert editor_response_data[0]["status"] == "Withdrawn" + assert editor_response_data[0]["city"] == "city" + assert editor_response_data[0]["state"] == "ca" + assert editor_response_data[0]["zip"] == "test" + assert editor_response_data[0]["country"] == "yes" + assert editor_response_data[1]["facility"] == "test" + assert editor_response_data[1]["status"] == "Withdrawn" + assert editor_response_data[1]["city"] == "city" + assert editor_response_data[1]["state"] == "ca" + assert editor_response_data[1]["zip"] == "test" + assert editor_response_data[1]["country"] == "yes" + assert editor_response_data[2]["facility"] == "editor test" + assert editor_response_data[2]["status"] == "Withdrawn" + assert editor_response_data[2]["city"] == "city" + assert editor_response_data[2]["state"] == "ca" + assert editor_response_data[2]["zip"] == "test" + assert editor_response_data[2]["country"] == "yes" + + assert viewer_response_data[0]["facility"] == "test" + assert viewer_response_data[0]["status"] == "Withdrawn" + assert viewer_response_data[0]["city"] == 
"city" + assert viewer_response_data[0]["state"] == "ca" + assert viewer_response_data[0]["zip"] == "test" + assert viewer_response_data[0]["country"] == "yes" + assert viewer_response_data[1]["facility"] == "test" + assert viewer_response_data[1]["status"] == "Withdrawn" + assert viewer_response_data[1]["city"] == "city" + assert viewer_response_data[1]["state"] == "ca" + assert viewer_response_data[1]["zip"] == "test" + assert viewer_response_data[1]["country"] == "yes" + assert viewer_response_data[2]["facility"] == "editor test" + assert viewer_response_data[2]["status"] == "Withdrawn" + assert viewer_response_data[2]["city"] == "city" + assert viewer_response_data[2]["state"] == "ca" + assert viewer_response_data[2]["zip"] == "test" + assert viewer_response_data[2]["country"] == "yes" + + +def test_delete_location_metadata(clients): """ Given a Flask application configured for testing and a study ID and location ID WHEN the '/study/{study_id}/metadata/location/{location_id}' - endpoint is requested (DELETE) + endpoint is requested (DELETE) THEN check that the response is valid and deletes the location metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore location_id = pytest.global_location_id + admin_location_id = pytest.global_location_id_admin + editor_location_id = pytest.global_location_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/location/{location_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/metadata/location/{location_id}" ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/location/{admin_location_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/location/{editor_location_id}" + ) + assert viewer_response.status_code == 403 assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 - 
response_get = _logged_in_client.get(f"/study/{study_id}/metadata/location") - - assert len(json.loads(response_get.data)) == 0 - -# ------------------- OTHER METADATA ------------------- # -def test_get_other_metadata(_logged_in_client): +# ------------------- OVERALL-OFFICIAL METADATA ------------------- # +def test_post_overall_official_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/other' endpoint is requested (GET) - THEN check that the response is valid and retrieves the other metadata + WHEN the '/study/{study_id}/metadata/overall-official' endpoint is requested (POST) + THEN check that the response is valid and creates the overall-official metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/other") + response = _logged_in_client.post( + f"/study/{study_id}/metadata/overall-official", + json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert response.status_code == 201 + response_data = json.loads(response.data) + pytest.global_overall_official_id = response_data[0]["id"] + + assert response_data[0]["name"] == "test" + assert response_data[0]["affiliation"] == "aff" + assert response_data[0]["role"] == "Study Chair" + + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/overall-official", + json=[ + {"name": "admin-test", "affiliation": "admin-aff", "role": "Study Chair"} + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_overall_official_id_admin = admin_response_data[0]["id"] - assert response.status_code == 200 + assert admin_response_data[0]["name"] == "admin-test" + 
assert admin_response_data[0]["affiliation"] == "admin-aff" + assert admin_response_data[0]["role"] == "Study Chair" + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/overall-official", + json=[ + {"name": "editor-test", "affiliation": "editor-aff", "role": "Study Chair"} + ], + ) -def test_put_other_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/other' endpoint is requested (PUT) - THEN check that the response is valid and updates the other metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_overall_official_id_editor = editor_response_data[0]["id"] - response = _logged_in_client.put( - f"/study/{study_id}/metadata/other", - json={ - "oversight_has_dmc": False, - "conditions": ["c"], - "keywords": ["true", "u"], - "size": 103, - }, - ) + assert editor_response_data[0]["name"] == "editor-test" + assert editor_response_data[0]["affiliation"] == "editor-aff" + assert editor_response_data[0]["role"] == "Study Chair" - assert response.status_code == 200 - response_data = json.loads(response.data) + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/overall-official", + json=[ + {"name": "viewer-test", "affiliation": "viewer-aff", "role": "Study Chair"} + ], + ) - assert response_data["oversight_has_dmc"] is False - assert response_data["conditions"] == ["c"] - assert response_data["keywords"] == ["true", "u"] - assert response_data["size"] == 103 + assert viewer_response.status_code == 403 -# ------------------- OVERALL-OFFICIAL METADATA ------------------- # -def test_get_overall_official_metadata(_logged_in_client): +def test_get_overall_official_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/overall-official' endpoint is 
requested (GET) THEN check that the response is valid and retrieves the overall-official metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.get(f"/study/{study_id}/metadata/overall-official") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/overall-official") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/overall-official") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/overall-official") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 - -def test_post_overall_official_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/overall-official' endpoint is requested (POST) - THEN check that the response is valid and creates the overall-official metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/overall-official", - json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], - ) - - assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_overall_official_id = response_data[0]["id"] + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) assert response_data[0]["name"] == "test" assert response_data[0]["affiliation"] == "aff" assert response_data[0]["role"] == "Study Chair" - - -def test_delete_overall_official_metadata(_logged_in_client): + assert response_data[1]["name"] == "admin-test" + assert response_data[1]["affiliation"] == "admin-aff" + assert response_data[1]["role"] == "Study Chair" + assert response_data[2]["name"] == "editor-test" 
+ assert response_data[2]["affiliation"] == "editor-aff" + assert response_data[2]["role"] == "Study Chair" + + assert admin_response_data[0]["name"] == "test" + assert admin_response_data[0]["affiliation"] == "aff" + assert admin_response_data[0]["role"] == "Study Chair" + assert admin_response_data[1]["name"] == "admin-test" + assert admin_response_data[1]["affiliation"] == "admin-aff" + assert admin_response_data[1]["role"] == "Study Chair" + assert admin_response_data[2]["name"] == "editor-test" + assert admin_response_data[2]["affiliation"] == "editor-aff" + assert admin_response_data[2]["role"] == "Study Chair" + + assert editor_response_data[0]["name"] == "test" + assert editor_response_data[0]["affiliation"] == "aff" + assert editor_response_data[0]["role"] == "Study Chair" + assert editor_response_data[1]["name"] == "admin-test" + assert editor_response_data[1]["affiliation"] == "admin-aff" + assert editor_response_data[1]["role"] == "Study Chair" + assert editor_response_data[2]["name"] == "editor-test" + assert editor_response_data[2]["affiliation"] == "editor-aff" + assert editor_response_data[2]["role"] == "Study Chair" + + assert viewer_response_data[0]["name"] == "test" + assert viewer_response_data[0]["affiliation"] == "aff" + assert viewer_response_data[0]["role"] == "Study Chair" + assert viewer_response_data[1]["name"] == "admin-test" + assert viewer_response_data[1]["affiliation"] == "admin-aff" + assert viewer_response_data[1]["role"] == "Study Chair" + assert viewer_response_data[2]["name"] == "editor-test" + assert viewer_response_data[2]["affiliation"] == "editor-aff" + assert viewer_response_data[2]["role"] == "Study Chair" + + +def test_delete_overall_official_metadata(clients): """ Given a Flask application configured for testing and a study ID and overall official ID @@ -855,39 +2612,39 @@ def test_delete_overall_official_metadata(_logged_in_client): endpoint is requested (DELETE) THEN check that the response is valid and deletes the 
overall-official metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore overall_official_id = pytest.global_overall_official_id + oo_admin_id = pytest.global_overall_official_id_admin + oo_editor_id = pytest.global_overall_official_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/overall-official/{overall_official_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/metadata/overall-official/{overall_official_id}" ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/overall-official/{oo_admin_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/overall-official/{oo_editor_id}" + ) + assert viewer_response.status_code == 403 assert response.status_code == 204 - response_get = _logged_in_client.get(f"/study/{study_id}/metadata/overall-official") - - assert len(json.loads(response_get.data)) == 0 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- OVERSIGHT METADATA ------------------- # -def test_get_oversight_metadata(_logged_in_client): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/oversight' endpoint is requested (GET) - THEN check that the response is valid and retrieves the oversight metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/oversight") - - assert response.status_code == 200 - - -def test_put_oversight_metadata(_logged_in_client): +def test_put_oversight_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/oversight' endpoint is requested (PUT) THEN check that the response is valid and updates the oversight metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = 
clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -899,27 +2656,68 @@ def test_put_oversight_metadata(_logged_in_client): assert response_data is True + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": False} + ) -# ------------------- REFERENCE METADATA ------------------- # -def test_get_reference_metadata(_logged_in_client): + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data is False + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": True} + ) + + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data is True + + viewer_response = _viewer_client.put( + f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": False} + ) + + assert viewer_response.status_code == 403 + + +def test_get_oversight_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/reference' endpoint is requested (GET) - THEN check that the response is valid and retrieves the reference metadata + WHEN the '/study/{study_id}/metadata/oversight' endpoint is requested (GET) + THEN check that the response is valid and retrieves the oversight metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/reference") + response = _logged_in_client.get(f"/study/{study_id}/metadata/oversight") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/oversight") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/oversight") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/oversight") assert response.status_code == 200 + assert admin_response.status_code == 200 + 
assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["oversight"] is True + assert admin_response_data["oversight"] is True + assert editor_response_data["oversight"] is True + assert viewer_response_data["oversight"] is True -def test_post_reference_metadata(_logged_in_client): +# ------------------- REFERENCE METADATA ------------------- # +def test_post_reference_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/reference' endpoint is requested (POST) THEN check that the response is valid and creates the reference metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( @@ -932,6 +2730,8 @@ def test_post_reference_metadata(_logged_in_client): } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 201 response_data = json.loads(response.data) @@ -941,8 +2741,126 @@ def test_post_reference_metadata(_logged_in_client): assert response_data[0]["type"] == "Yes" assert response_data[0]["citation"] == "reference citation" + admin_response = _admin_client.post( + f"/study/{study_id}/metadata/reference", + json=[ + { + "identifier": "admin-reference identifier", + "type": "Yes", + "citation": "admin-reference citation", + } + ], + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 + admin_response_data = json.loads(admin_response.data) + pytest.global_reference_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "admin-reference identifier" + assert 
admin_response_data[0]["type"] == "Yes" + assert admin_response_data[0]["citation"] == "admin-reference citation" + + editor_response = _editor_client.post( + f"/study/{study_id}/metadata/reference", + json=[ + { + "identifier": "editor-reference identifier", + "type": "Yes", + "citation": "editor-reference citation", + } + ], + ) + + assert editor_response.status_code == 201 + editor_response_data = json.loads(editor_response.data) + pytest.global_reference_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["identifier"] == "editor-reference identifier" + assert editor_response_data[0]["type"] == "Yes" + assert editor_response_data[0]["citation"] == "editor-reference citation" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/reference", + json=[ + { + "identifier": "viewer-reference identifier", + "type": "Yes", + "citation": "editor-reference citation", + } + ], + ) + + assert viewer_response.status_code == 403 + + +def test_get_reference_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/reference' endpoint is requested (GET) + THEN check that the response is valid and retrieves the reference metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.get(f"/study/{study_id}/metadata/reference") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/reference") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/reference") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/reference") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + 
editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -def test_delete_reference_metadata(_logged_in_client): + assert response_data[0]["identifier"] == "reference identifier" + assert response_data[0]["type"] == "Yes" + assert response_data[0]["citation"] == "reference citation" + assert response_data[1]["identifier"] == "admin-reference identifier" + assert response_data[1]["type"] == "Yes" + assert response_data[1]["citation"] == "admin-reference citation" + assert response_data[2]["identifier"] == "editor-reference identifier" + assert response_data[2]["type"] == "Yes" + assert response_data[2]["citation"] == "editor-reference citation" + + assert admin_response_data[0]["identifier"] == "reference identifier" + assert admin_response_data[0]["type"] == "Yes" + assert admin_response_data[0]["citation"] == "reference citation" + assert admin_response_data[1]["identifier"] == "admin-reference identifier" + assert admin_response_data[1]["type"] == "Yes" + assert admin_response_data[1]["citation"] == "admin-reference citation" + assert admin_response_data[2]["identifier"] == "editor-reference identifier" + assert admin_response_data[2]["type"] == "Yes" + assert admin_response_data[2]["citation"] == "editor-reference citation" + + assert editor_response_data[0]["identifier"] == "reference identifier" + assert editor_response_data[0]["type"] == "Yes" + assert editor_response_data[0]["citation"] == "reference citation" + assert editor_response_data[1]["identifier"] == "admin-reference identifier" + assert editor_response_data[1]["type"] == "Yes" + assert editor_response_data[1]["citation"] == "admin-reference citation" + assert editor_response_data[2]["identifier"] == "editor-reference identifier" + assert editor_response_data[2]["type"] == "Yes" + assert editor_response_data[2]["citation"] == "editor-reference citation" + + assert viewer_response_data[0]["identifier"] == "reference identifier" + assert 
viewer_response_data[0]["type"] == "Yes" + assert viewer_response_data[0]["citation"] == "reference citation" + assert viewer_response_data[1]["identifier"] == "admin-reference identifier" + assert viewer_response_data[1]["type"] == "Yes" + assert viewer_response_data[1]["citation"] == "admin-reference citation" + assert viewer_response_data[2]["identifier"] == "editor-reference identifier" + assert viewer_response_data[2]["type"] == "Yes" + assert viewer_response_data[2]["citation"] == "editor-reference citation" + + +def test_delete_reference_metadata(clients): """ Given a Flask application configured for testing and a study ID and reference ID @@ -950,39 +2868,39 @@ def test_delete_reference_metadata(_logged_in_client): endpoint is requested (DELETE) THEN check that the response is valid and deletes the reference metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore reference_id = pytest.global_reference_id + admin_reference_id = pytest.global_reference_id_admin + editor_reference_id = pytest.global_reference_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/reference/{reference_id}" + ) response = _logged_in_client.delete( f"/study/{study_id}/metadata/reference/{reference_id}" ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/reference/{admin_reference_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/reference/{editor_reference_id}" + ) + assert viewer_response.status_code == 403 assert response.status_code == 204 - response_get = _logged_in_client.get(f"/study/{study_id}/metadata/reference") - - assert len(json.loads(response_get.data)) == 0 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- SPONSORS METADATA ------------------- # -def test_get_sponsors_metadata(_logged_in_client): - """ - Given a Flask application 
configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (GET) - THEN check that the response is valid and retrieves the sponsors metadata - """ - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/sponsors") - - assert response.status_code == 200 - - -def test_put_sponsors_metadata(_logged_in_client): +def test_put_sponsors_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (PUT) THEN check that the response is valid and updates the sponsors metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -1008,27 +2926,173 @@ def test_put_sponsors_metadata(_logged_in_client): ) assert response_data["lead_sponsor_name"] == "sponsor name" + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/sponsors", + json={ + "responsible_party_type": "Sponsor", + "responsible_party_investigator_name": "admin sponsor name", + "responsible_party_investigator_title": "admin sponsor title", + "responsible_party_investigator_affiliation": "admin sponsor affiliation", + "lead_sponsor_name": "admin sponsor name", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) -# ------------------- STATUS METADATA ------------------- # -def test_get_status_metadata(_logged_in_client): + assert admin_response_data["responsible_party_type"] == "Sponsor" + assert ( + admin_response_data["responsible_party_investigator_name"] + == "admin sponsor name" + ) + assert ( + admin_response_data["responsible_party_investigator_title"] + == "admin sponsor title" + ) + # pylint: disable=line-too-long + assert ( + admin_response_data["responsible_party_investigator_affiliation"] + == "admin sponsor 
affiliation" + ) + # pylint: disable=line-too-long + assert ( + admin_response_data["responsible_party_investigator_affiliation"] + == "admin sponsor affiliation" + ) + assert admin_response_data["lead_sponsor_name"] == "admin sponsor name" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/sponsors", + json={ + "responsible_party_type": "Sponsor", + "responsible_party_investigator_name": "editor sponsor name", + "responsible_party_investigator_title": "editor sponsor title", + "responsible_party_investigator_affiliation": "editor sponsor affiliation", + "lead_sponsor_name": "editor sponsor name", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["responsible_party_type"] == "Sponsor" + assert ( + editor_response_data["responsible_party_investigator_name"] + == "editor sponsor name" + ) + assert ( + editor_response_data["responsible_party_investigator_title"] + == "editor sponsor title" + ) + assert ( + editor_response_data["responsible_party_investigator_affiliation"] + == "editor sponsor affiliation" + ) # noqa: E501 + assert editor_response_data["lead_sponsor_name"] == "editor sponsor name" + + viewer_response = _viewer_client.put( + f"/study/{study_id}/metadata/sponsors", + json={ + "responsible_party_type": "Sponsor", + "responsible_party_investigator_name": "viewer sponsor name", + "responsible_party_investigator_title": "viewer sponsor title", + "responsible_party_investigator_affiliation": "viewer sponsor affiliation", + "lead_sponsor_name": "viewer sponsor name", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_sponsors_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/status' endpoint is requested (GET) - THEN check that the response is valid and retrieves the status metadata + WHEN the '/study/{study_id}/metadata/sponsors' endpoint is 
requested (GET) + THEN check that the response is valid and retrieves the sponsors metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/status") + response = _logged_in_client.get(f"/study/{study_id}/metadata/sponsors") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/sponsors") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/sponsors") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/sponsors") assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["responsible_party_type"] == "Sponsor" + assert response_data["responsible_party_investigator_name"] == "editor sponsor name" + assert ( + response_data["responsible_party_investigator_title"] == "editor sponsor title" + ) + assert ( + response_data["responsible_party_investigator_affiliation"] + == "editor sponsor affiliation" + ) + assert response_data["lead_sponsor_name"] == "editor sponsor name" + + assert admin_response_data["responsible_party_type"] == "Sponsor" + assert ( + admin_response_data["responsible_party_investigator_name"] + == "editor sponsor name" + ) + assert ( + admin_response_data["responsible_party_investigator_title"] + == "editor sponsor title" + ) + assert ( + admin_response_data["responsible_party_investigator_affiliation"] + == "editor sponsor affiliation" + ) + assert admin_response_data["lead_sponsor_name"] == "editor sponsor name" + + assert editor_response_data["responsible_party_type"] == "Sponsor" + assert ( + 
editor_response_data["responsible_party_investigator_name"] + == "editor sponsor name" + ) + assert ( + editor_response_data["responsible_party_investigator_title"] + == "editor sponsor title" + ) + assert ( + editor_response_data["responsible_party_investigator_affiliation"] + == "editor sponsor affiliation" + ) + assert editor_response_data["lead_sponsor_name"] == "editor sponsor name" + + assert viewer_response_data["responsible_party_type"] == "Sponsor" + assert ( + viewer_response_data["responsible_party_investigator_name"] + == "editor sponsor name" + ) + assert ( + viewer_response_data["responsible_party_investigator_title"] + == "editor sponsor title" + ) + assert ( + viewer_response_data["responsible_party_investigator_affiliation"] + == "editor sponsor affiliation" + ) + assert viewer_response_data["lead_sponsor_name"] == "editor sponsor name" -def test_put_status_metadata(_logged_in_client): +# ------------------- STATUS METADATA ------------------- # +def test_put_status_metadata(clients): """ Given a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/status' endpoint is requested (PUT) THEN check that the response is valid and updates the status metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( @@ -1052,3 +3116,100 @@ def test_put_status_metadata(_logged_in_client): assert response_data["start_date_type"] == "Actual" assert response_data["completion_date"] == "2023-11-16 00:00:00" assert response_data["completion_date_type"] == "Actual" + + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/status", + json={ + "overall_status": "Withdrawn", + "why_stopped": "admin-test", + "start_date": "test", + "start_date_type": "Actual", + "completion_date": "admin date", + "completion_date_type": "Actual", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = 
json.loads(admin_response.data) + + assert admin_response_data["overall_status"] == "Withdrawn" + assert admin_response_data["why_stopped"] == "admin-test" + assert admin_response_data["start_date"] == "test" + assert admin_response_data["start_date_type"] == "Actual" + assert admin_response_data["completion_date"] == "admin date" + assert admin_response_data["completion_date_type"] == "Actual" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/status", + json={ + "overall_status": "Withdrawn", + "why_stopped": "editor-test", + "start_date": "2023-11-15 00:00:00", + "start_date_type": "Actual", + "completion_date": "completion date", + "completion_date_type": "Actual", + }, + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data["overall_status"] == "Withdrawn" + assert editor_response_data["why_stopped"] == "editor-test" + assert editor_response_data["start_date"] == "2023-11-15 00:00:00" + assert editor_response_data["start_date_type"] == "Actual" + assert editor_response_data["completion_date"] == "completion date" + assert editor_response_data["completion_date_type"] == "Actual" + + +def test_get_status_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/status' endpoint is requested (GET) + THEN check that the response is valid and retrieves the status metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.get(f"/study/{study_id}/metadata/status") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/status") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/status") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/status") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + 
assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["overall_status"] == "Withdrawn" + assert response_data["why_stopped"] == "editor-test" + assert response_data["start_date"] == "2023-11-15 00:00:00" + assert response_data["start_date_type"] == "Actual" + assert response_data["completion_date"] == "completion date" + assert response_data["completion_date_type"] == "Actual" + + assert admin_response_data["overall_status"] == "Withdrawn" + assert admin_response_data["why_stopped"] == "editor-test" + assert admin_response_data["start_date"] == "2023-11-15 00:00:00" + assert admin_response_data["start_date_type"] == "Actual" + assert admin_response_data["completion_date"] == "completion date" + assert admin_response_data["completion_date_type"] == "Actual" + + assert editor_response_data["overall_status"] == "Withdrawn" + assert editor_response_data["why_stopped"] == "editor-test" + assert editor_response_data["start_date"] == "2023-11-15 00:00:00" + assert editor_response_data["start_date_type"] == "Actual" + assert editor_response_data["completion_date"] == "completion date" + assert editor_response_data["completion_date_type"] == "Actual" + + assert viewer_response_data["overall_status"] == "Withdrawn" + assert viewer_response_data["why_stopped"] == "editor-test" + assert viewer_response_data["start_date"] == "2023-11-15 00:00:00" + assert viewer_response_data["start_date_type"] == "Actual" + assert viewer_response_data["completion_date"] == "completion date" + assert viewer_response_data["completion_date_type"] == "Actual" diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index 89d50e78..e4ac4ce0 100644 --- 
a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -4,20 +4,231 @@ import pytest + # ------------------- VERSION ADD ------------------- # +def test_post_dataset_version(clients): + """ + Given a Flask application configured for testing, study ID and a dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/version' + endpoint is requested (POST) + Then check that the response is valid and creates a dataset version + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/version", + json={ + "title": "Dataset Version 1.0", + "published": False, + "doi": "doi:test", + "changelog": "changelog testing here", + }, + ) + + assert response.status_code == 201 + response_data = json.loads(response.data) + pytest.global_dataset_version_id = response_data["id"] + + assert response_data["title"] == "Dataset Version 1.0" + assert response_data["published"] is False + assert response_data["doi"] == "doi:test" + assert response_data["changelog"] == "changelog testing here" + + +def test_get_all_dataset_versions(clients): + """ + Given a Flask application configured for testing, study ID and a dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/version' endpoint is requested (GET) + Then check that the response is valid and retrieves all dataset versions + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version", + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version", + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version", + ) + 
viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version", + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + + assert len(response_data) == 1 + assert len(admin_response_data) == 1 + assert len(editor_response_data) == 1 + + assert response_data[0]["title"] == "Dataset Version 1.0" + assert response_data[0]["published"] is False + assert response_data[0]["doi"] == "doi:test" + assert response_data[0]["changelog"] == "changelog testing here" + + assert admin_response_data[0]["title"] == "Dataset Version 1.0" + assert admin_response_data[0]["published"] is False + assert admin_response_data[0]["doi"] == "doi:test" + assert admin_response_data[0]["changelog"] == "changelog testing here" + + assert editor_response_data[0]["title"] == "Dataset Version 1.0" + assert editor_response_data[0]["published"] is False + assert editor_response_data[0]["doi"] == "doi:test" + assert editor_response_data[0]["changelog"] == "changelog testing here" + + +def test_get_dataset_version(clients): + """ + Given a Flask application configured for testing, study ID, dataset ID and version ID + When the '/study/{study_id}/dataset/{dataset_id}/version/{version_id}' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset version + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + version_id = pytest.global_dataset_version_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + ) + admin_response = _admin_client.get( + 
f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + assert response_data["title"] == "Dataset Version 1.0" + assert response_data["published"] is False + assert response_data["doi"] == "doi:test" + assert response_data["changelog"] == "changelog testing here" -def test_get_version_study_metadata(_logged_in_client): + assert admin_response_data["title"] == "Dataset Version 1.0" + assert admin_response_data["published"] is False + assert admin_response_data["doi"] == "doi:test" + assert admin_response_data["changelog"] == "changelog testing here" + + assert editor_response_data["title"] == "Dataset Version 1.0" + assert editor_response_data["published"] is False + assert editor_response_data["doi"] == "doi:test" + assert editor_response_data["changelog"] == "changelog testing here" + + +def test_put_dataset_version(clients): + """ + Given a Flask application configured for testing, study ID, dataset ID and version ID + When the '/study/{study_id}/dataset/{dataset_id}/version/{version_id}' + is requested (PUT) + Then check that the response is valid and updates the dataset version + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] + dataset_id = pytest.global_dataset_id + version_id = pytest.global_dataset_version_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + json={ + "title": 
"Dataset Version 2.0", + "changelog": "Updating the changelog", + "published": False, + "doi": "doi:test123", + "readme": "readme testing here", + }, + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + print(response_data) + + assert response_data["title"] == "Dataset Version 2.0" + assert response_data["changelog"] == "Updating the changelog" + assert response_data["doi"] == "doi:test123" + assert response_data["readme"] == "" + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + json={ + "title": "Dataset Version 3.0", + "changelog": "Changelog modified by admin", + "published": False, + "doi": "doi:test", + "readme": "readme modified by editor", + }, + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data["title"] == "Dataset Version 3.0" + assert admin_response_data["changelog"] == "Changelog modified by admin" + assert admin_response_data["published"] is False + assert admin_response_data["doi"] == "doi:test" + assert admin_response_data["readme"] == "" + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + json={ + "title": "Dataset Version 4.0", + "changelog": "Changelog modified by editor", + "published": False, + "doi": "doi:test", + "readme": "readme modified by editor", + }, + ) + + assert editor_response.status_code == 403 + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", + json={ + "title": "Dataset Version 5.0", + "changelog": "Changelog modified by viewer", + "published": False, + "doi": "test:doi", + "readme": "readme modified by viewer", + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_version_study_metadata(clients): """ Given a Flask application configured for testing - WHEN the 
/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata endpoint is requested (GET) + WHEN the /study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata + endpoint is requested (GET) THEN check that the response is valid and retrieves the design metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore - _logged_in_client.post( + arm_response = _logged_in_client.post( f"/study/{study_id}/metadata/arm", json=[ { @@ -28,7 +239,7 @@ def test_get_version_study_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + avail_ipd_response = _logged_in_client.post( f"/study/{study_id}/metadata/available-ipd", json=[ { @@ -39,7 +250,7 @@ def test_get_version_study_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + cc_response = _logged_in_client.post( f"/study/{study_id}/metadata/central-contact", json=[ { @@ -52,7 +263,7 @@ def test_get_version_study_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + location_response = _logged_in_client.post( f"/study/{study_id}/metadata/location", json=[ { @@ -65,7 +276,7 @@ def test_get_version_study_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + id_response = _logged_in_client.post( f"/study/{study_id}/metadata/identification", json={ "primary": { @@ -84,7 +295,7 @@ def test_get_version_study_metadata(_logged_in_client): ], }, ) - _logged_in_client.post( + intervention_response = _logged_in_client.post( f"/study/{study_id}/metadata/intervention", json=[ { @@ -96,15 +307,15 @@ def test_get_version_study_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + link_response = _logged_in_client.post( f"/study/{study_id}/metadata/link", json=[{"url": "google.com", "title": "google link"}], ) - _logged_in_client.post( + of_response = 
_logged_in_client.post( f"/study/{study_id}/metadata/overall-official", json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], ) - _logged_in_client.post( + reference_response = _logged_in_client.post( f"/study/{study_id}/metadata/reference", json=[ { @@ -115,12 +326,37 @@ def test_get_version_study_metadata(_logged_in_client): ], ) + assert arm_response.status_code == 201 + assert avail_ipd_response.status_code == 201 + assert cc_response.status_code == 201 + assert location_response.status_code == 201 + assert id_response.status_code == 201 + assert intervention_response.status_code == 201 + assert link_response.status_code == 201 + assert of_response.status_code == 201 + assert reference_response.status_code == 201 + response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" ) - response_data = json.loads(response.data) - # print(response_data) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" + ) + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + assert response_data["available_ipd"][0]["identifier"] == "identifier1" assert response_data["available_ipd"][0]["url"] == "google.com" assert response_data["arms"][0]["label"] == "Label1" @@ -142,8 +378,8 @@ def test_get_version_study_metadata(_logged_in_client): assert response_data["references"][0]["identifier"] == "reference identifier" assert 
response_data["references"][0]["citation"] == "reference citation" - assert response_data["description"]["brief_summary"] == "brief_summary" - assert response_data["design"]["design_allocation"] == "dfasdfasd" + assert response_data["description"]["brief_summary"] == "editor-brief_summary" + assert response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert response_data["design"]["study_type"] == "Interventional" assert response_data["design"]["design_intervention_model"] == "Treatment" @@ -174,12 +410,18 @@ def test_get_version_study_metadata(_logged_in_client): assert response_data["status"]["overall_status"] == "Withdrawn" assert response_data["status"]["start_date"] == "2023-11-15 00:00:00" assert ( - response_data["sponsors"]["responsible_party_investigator_name"] == "party name" + response_data["sponsors"]["responsible_party_investigator_name"] + == "editor sponsor name" ) assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" - assert response_data["sponsors"]["lead_sponsor_name"] == "sponsor name" - assert response_data["collaborators"] == ["collaborator1123"] - assert response_data["conditions"] == ["c"] + assert response_data["sponsors"]["lead_sponsor_name"] == "editor sponsor name" + assert response_data["collaborators"] == ["editor-collaborator1123"] + assert response_data["conditions"] == [ + "true", + "editor-conditions string", + "editor-keywords string", + "editor-size string", + ] assert response_data["ipd_sharing"]["ipd_sharing"] == "Yes" assert response_data["ipd_sharing"]["ipd_sharing_info_type_list"] == [ @@ -189,18 +431,177 @@ def test_get_version_study_metadata(_logged_in_client): assert response_data["oversight"] is True + assert admin_response_data["available_ipd"][0]["identifier"] == "identifier1" + assert admin_response_data["available_ipd"][0]["url"] == "google.com" + assert admin_response_data["arms"][0]["label"] == "Label1" + + assert admin_response_data["contacts"][0]["name"] == "central-contact" + 
assert admin_response_data["contacts"][0]["affiliation"] == "affiliation" + + assert admin_response_data["secondary_identifiers"][0]["identifier"] == "test" + assert admin_response_data["secondary_identifiers"][0]["identifier_type"] == "test" + assert admin_response_data["interventions"][0]["type"] == "Device" + assert admin_response_data["interventions"][0]["name"] == "name test" + assert admin_response_data["links"][0]["title"] == "google link" + assert admin_response_data["links"][0]["url"] == "google.com" + assert admin_response_data["locations"][0]["country"] == "yes" + assert admin_response_data["locations"][0]["facility"] == "test" + assert admin_response_data["overall_officials"][0]["name"] == "test" + assert admin_response_data["overall_officials"][0]["role"] == "Study Chair" + assert admin_response_data["overall_officials"][0]["affiliation"] == "aff" + assert admin_response_data["references"][0]["identifier"] == "reference identifier" + assert admin_response_data["references"][0]["citation"] == "reference citation" + + assert admin_response_data["description"]["brief_summary"] == "editor-brief_summary" + assert admin_response_data["design"]["design_allocation"] == "editor-dfasdfasd" -def test_get_version_dataset_metadata(_logged_in_client): + assert admin_response_data["design"]["study_type"] == "Interventional" + assert admin_response_data["design"]["design_intervention_model"] == "Treatment" + assert ( + admin_response_data["design"]["design_primary_purpose"] == "Parallel Assignment" + ) + assert admin_response_data["design"]["design_masking"] == "Double" + assert admin_response_data["design"]["design_masking_description"] == "tewsfdasf" + assert admin_response_data["design"]["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert admin_response_data["design"]["phase_list"] == ["N/A"] + assert admin_response_data["design"]["enrollment_count"] == 3 + assert admin_response_data["design"]["enrollment_type"] == "Actual" + assert 
admin_response_data["design"]["number_arms"] == 2 + assert admin_response_data["design"]["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert admin_response_data["design"]["design_time_perspective_list"] == ["Other"] + assert admin_response_data["design"]["bio_spec_retention"] == "None Retained" + assert admin_response_data["design"]["target_duration"] == "rewrwe" + assert admin_response_data["design"]["number_groups_cohorts"] == 1 + assert admin_response_data["eligibility"]["gender"] == "All" + assert admin_response_data["eligibility"]["gender_based"] == "Yes" + assert admin_response_data["eligibility"]["minimum_age_value"] == 18 + assert admin_response_data["primary_identifier"]["identifier"] == "test" + assert admin_response_data["primary_identifier"]["identifier_type"] == "test" + assert admin_response_data["status"]["overall_status"] == "Withdrawn" + assert admin_response_data["status"]["start_date"] == "2023-11-15 00:00:00" + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_name"] + == "editor sponsor name" + ) + assert admin_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert admin_response_data["sponsors"]["lead_sponsor_name"] == "editor sponsor name" + assert admin_response_data["collaborators"] == ["editor-collaborator1123"] + assert admin_response_data["conditions"] == [ + "true", + "editor-conditions string", + "editor-keywords string", + "editor-size string", + ] + + assert admin_response_data["ipd_sharing"]["ipd_sharing"] == "Yes" + assert admin_response_data["ipd_sharing"]["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + + assert admin_response_data["oversight"] is True + + assert editor_response_data["available_ipd"][0]["identifier"] == "identifier1" + assert editor_response_data["available_ipd"][0]["url"] == "google.com" + assert editor_response_data["arms"][0]["label"] == "Label1" + + assert editor_response_data["contacts"][0]["name"] == 
"central-contact" + assert editor_response_data["contacts"][0]["affiliation"] == "affiliation" + + assert editor_response_data["secondary_identifiers"][0]["identifier"] == "test" + assert editor_response_data["secondary_identifiers"][0]["identifier_type"] == "test" + assert editor_response_data["interventions"][0]["type"] == "Device" + assert editor_response_data["interventions"][0]["name"] == "name test" + assert editor_response_data["links"][0]["title"] == "google link" + assert editor_response_data["links"][0]["url"] == "google.com" + assert editor_response_data["locations"][0]["country"] == "yes" + assert editor_response_data["locations"][0]["facility"] == "test" + assert editor_response_data["overall_officials"][0]["name"] == "test" + assert editor_response_data["overall_officials"][0]["role"] == "Study Chair" + assert editor_response_data["overall_officials"][0]["affiliation"] == "aff" + assert editor_response_data["references"][0]["identifier"] == "reference identifier" + assert editor_response_data["references"][0]["citation"] == "reference citation" + + assert ( + editor_response_data["description"]["brief_summary"] == "editor-brief_summary" + ) + assert editor_response_data["design"]["design_allocation"] == "editor-dfasdfasd" + + assert editor_response_data["design"]["study_type"] == "Interventional" + assert editor_response_data["design"]["design_intervention_model"] == "Treatment" + assert ( + editor_response_data["design"]["design_primary_purpose"] + == "Parallel Assignment" + ) + assert editor_response_data["design"]["design_masking"] == "Double" + assert editor_response_data["design"]["design_masking_description"] == "tewsfdasf" + assert editor_response_data["design"]["design_who_masked_list"] == [ + "Participant", + "Care Provider", + ] + assert editor_response_data["design"]["phase_list"] == ["N/A"] + assert editor_response_data["design"]["enrollment_count"] == 3 + assert editor_response_data["design"]["enrollment_type"] == "Actual" + assert 
editor_response_data["design"]["number_arms"] == 2 + assert editor_response_data["design"]["design_observational_model_list"] == [ + "Cohort", + "Case-Control", + ] + assert editor_response_data["design"]["design_time_perspective_list"] == ["Other"] + assert editor_response_data["design"]["bio_spec_retention"] == "None Retained" + assert editor_response_data["design"]["target_duration"] == "rewrwe" + assert editor_response_data["design"]["number_groups_cohorts"] == 1 + assert editor_response_data["eligibility"]["gender"] == "All" + assert editor_response_data["eligibility"]["gender_based"] == "Yes" + assert editor_response_data["eligibility"]["minimum_age_value"] == 18 + assert editor_response_data["primary_identifier"]["identifier"] == "test" + assert editor_response_data["primary_identifier"]["identifier_type"] == "test" + assert editor_response_data["status"]["overall_status"] == "Withdrawn" + assert editor_response_data["status"]["start_date"] == "2023-11-15 00:00:00" + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_name"] + == "editor sponsor name" + ) + assert editor_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + editor_response_data["sponsors"]["lead_sponsor_name"] == "editor sponsor name" + ) + assert editor_response_data["collaborators"] == ["editor-collaborator1123"] + assert editor_response_data["conditions"] == [ + "true", + "editor-conditions string", + "editor-keywords string", + "editor-size string", + ] + + assert editor_response_data["ipd_sharing"]["ipd_sharing"] == "Yes" + assert editor_response_data["ipd_sharing"]["ipd_sharing_info_type_list"] == [ + "Study Protocol", + "Analytical Code", + ] + + assert editor_response_data["oversight"] is True + + +def test_get_version_dataset_metadata(clients): """ Given a Flask application configured for testing - WHEN the '/study//dataset//version//dataset-metadata' endpoint is requested (GET) + WHEN the '/study//dataset//version//dataset-metadata' + 
endpoint is requested (GET) THEN check that the response is valid and retrieves the design metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore - _logged_in_client.post( + contributor_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", json=[ { @@ -221,7 +622,7 @@ def test_get_version_dataset_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + creator_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", json=[ { @@ -242,11 +643,11 @@ def test_get_version_dataset_metadata(_logged_in_client): ], ) - _logged_in_client.post( + date_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/date", json=[{"date": 20210101, "type": "Type", "information": "Info"}], ) - _logged_in_client.post( + funder_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", json=[ { @@ -260,7 +661,7 @@ def test_get_version_dataset_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + rights_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", json=[ { @@ -271,7 +672,7 @@ def test_get_version_dataset_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + subject_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", json=[ { @@ -283,7 +684,7 @@ def test_get_version_dataset_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + alt_identifier_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", json=[ { @@ -292,7 +693,7 @@ def test_get_version_dataset_metadata(_logged_in_client): } ], ) - _logged_in_client.post( + related_item_response = _logged_in_client.post( 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", json=[ { @@ -331,13 +732,96 @@ def test_get_version_dataset_metadata(_logged_in_client): } ], ) + + assert contributor_response.status_code == 201 + assert creator_response.status_code == 201 + assert date_response.status_code == 201 + assert funder_response.status_code == 201 + assert rights_response.status_code == 201 + assert subject_response.status_code == 201 + assert alt_identifier_response.status_code == 201 + assert related_item_response.status_code == 201 + response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/dataset-metadata" ) - response_data = json.loads(response.data) - # print(response_data) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/dataset-metadata" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/dataset-metadata" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/dataset-metadata" + ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + + # seach for main title index in response_data[n]["titles"] + # pylint: disable=line-too-long + main_title_0 = next( + ( + index + for (index, d) in enumerate(response_data["related_items"][0]["titles"]) + if d["type"] == "MainTitle" + ), + None, + ) + # seach for subtitle index in response_data["related_items"][0]["titles"] + sub_title_0 = next( + ( + index + for (index, d) in enumerate(response_data["related_items"][0]["titles"]) + if d["type"] == "Subtitle" + ), + None, + ) + a_main_title_0 = next( + ( + index + for (index, d) in enumerate( + 
admin_response_data["related_items"][0]["titles"] + ) + if d["type"] == "MainTitle" + ), + None, + ) + a_sub_title_0 = next( + ( + index + for (index, d) in enumerate( + admin_response_data["related_items"][0]["titles"] + ) + if d["type"] == "Subtitle" + ), + None, + ) + e_main_title_0 = next( + ( + index + for (index, d) in enumerate( + editor_response_data["related_items"][0]["titles"] + ) + if d["type"] == "MainTitle" + ), + None, + ) + e_sub_title_0 = next( + ( + index + for (index, d) in enumerate( + editor_response_data["related_items"][0]["titles"] + ) + if d["type"] == "Subtitle" + ), + None, + ) assert response_data["contributors"][0]["name"] == "Name here" assert response_data["contributors"][0]["name_type"] == "Personal" @@ -353,10 +837,10 @@ def test_get_version_dataset_metadata(_logged_in_client): assert response_data["subjects"][0]["subject"] == "Subject" assert response_data["about"]["language"] == "English" - assert response_data["about"]["resource_type"] == "Resource Type" + assert response_data["about"]["resource_type"] == "Editor Resource Type" assert response_data["about"]["size"] == ["Size"] - assert response_data["access"]["type"] == "type" - assert response_data["access"]["description"] == "description" + assert response_data["access"]["type"] == "editor type" + assert response_data["access"]["description"] == "editor description" assert response_data["consent"]["noncommercial"] is True assert response_data["consent"]["geog_restrict"] is True assert response_data["consent"]["research_type"] is True @@ -365,7 +849,7 @@ def test_get_version_dataset_metadata(_logged_in_client): assert response_data["publisher"]["publisher"] == "Publisher" assert ( response_data["publisher"]["managing_organization_name"] - == "Managing Organization Name" + == "Managing Editor Organization Name" ) assert response_data["identifiers"][0]["identifier"] == "identifier test" @@ -379,10 +863,14 @@ def test_get_version_dataset_metadata(_logged_in_client): ) assert 
response_data["related_items"][0]["creators"][0]["name"] == "Name" assert response_data["related_items"][0]["creators"][0]["name_type"] == "Personal" - assert response_data["related_items"][0]["titles"][0]["title"] == "Title" - assert response_data["related_items"][0]["titles"][0]["type"] == "MainTitle" - assert response_data["related_items"][0]["titles"][1]["title"] == "Title" - assert response_data["related_items"][0]["titles"][1]["type"] == "Subtitle" + assert response_data["related_items"][0]["titles"][main_title_0]["title"] == "Title" + assert ( + response_data["related_items"][0]["titles"][main_title_0]["type"] == "MainTitle" + ) + assert response_data["related_items"][0]["titles"][sub_title_0]["title"] == "Title" + assert ( + response_data["related_items"][0]["titles"][sub_title_0]["type"] == "Subtitle" + ) assert ( response_data["related_items"][0]["identifiers"][0]["identifier"] == "Identifier" @@ -390,13 +878,153 @@ def test_get_version_dataset_metadata(_logged_in_client): assert response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" assert response_data["related_items"][0]["type"] == "Type" + assert admin_response_data["contributors"][0]["name"] == "Name here" + assert admin_response_data["contributors"][0]["name_type"] == "Personal" + assert admin_response_data["contributors"][0]["contributor_type"] == "Con Type" + assert admin_response_data["dates"][0]["date"] == "01-01-1970" + assert admin_response_data["dates"][0]["type"] == "Type" + assert admin_response_data["creators"][0]["name"] == "Name here" + assert admin_response_data["creators"][0]["name_type"] == "Personal" + assert admin_response_data["funders"][0]["name"] == "Name" + assert admin_response_data["funders"][0]["identifier"] == "Identifier" + assert admin_response_data["rights"][0]["identifier"] == "Identifier" + assert admin_response_data["rights"][0]["rights"] == "Rights" + assert admin_response_data["subjects"][0]["subject"] == "Subject" + assert 
admin_response_data["about"]["language"] == "English" + + assert admin_response_data["about"]["resource_type"] == "Editor Resource Type" + assert admin_response_data["about"]["size"] == ["Size"] + assert admin_response_data["access"]["type"] == "editor type" + assert admin_response_data["access"]["description"] == "editor description" + assert admin_response_data["consent"]["noncommercial"] is True + assert admin_response_data["consent"]["geog_restrict"] is True + assert admin_response_data["consent"]["research_type"] is True + assert admin_response_data["de_identification"]["direct"] is True + assert admin_response_data["de_identification"]["type"] == "Level" + assert admin_response_data["publisher"]["publisher"] == "Publisher" + assert ( + admin_response_data["publisher"]["managing_organization_name"] + == "Managing Editor Organization Name" + ) + + assert admin_response_data["identifiers"][0]["identifier"] == "identifier test" + assert admin_response_data["identifiers"][0]["type"] == "ARK" + assert admin_response_data["related_items"][0]["publication_year"] == "1970" + assert admin_response_data["related_items"][0]["publisher"] == "Publisher" + assert ( + admin_response_data["related_items"][0]["contributors"][0]["name"] + == "Ndafsdame" + ) + assert ( + admin_response_data["related_items"][0]["contributors"][0]["contributor_type"] + == "Con Type" + ) + assert admin_response_data["related_items"][0]["creators"][0]["name"] == "Name" + assert ( + admin_response_data["related_items"][0]["creators"][0]["name_type"] + == "Personal" + ) + assert ( + admin_response_data["related_items"][0]["titles"][a_main_title_0]["title"] + == "Title" + ) + assert ( + admin_response_data["related_items"][0]["titles"][a_main_title_0]["type"] + == "MainTitle" + ) + assert ( + admin_response_data["related_items"][0]["titles"][a_sub_title_0]["title"] + == "Title" + ) + assert ( + admin_response_data["related_items"][0]["titles"][a_sub_title_0]["type"] + == "Subtitle" + ) + assert ( + 
admin_response_data["related_items"][0]["identifiers"][0]["identifier"] + == "Identifier" + ) + assert admin_response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" + assert admin_response_data["related_items"][0]["type"] == "Type" + + assert editor_response_data["contributors"][0]["name"] == "Name here" + assert editor_response_data["contributors"][0]["name_type"] == "Personal" + assert editor_response_data["contributors"][0]["contributor_type"] == "Con Type" + assert editor_response_data["dates"][0]["date"] == "01-01-1970" + assert editor_response_data["dates"][0]["type"] == "Type" + assert editor_response_data["creators"][0]["name"] == "Name here" + assert editor_response_data["creators"][0]["name_type"] == "Personal" + assert editor_response_data["funders"][0]["name"] == "Name" + assert editor_response_data["funders"][0]["identifier"] == "Identifier" + assert editor_response_data["rights"][0]["identifier"] == "Identifier" + assert editor_response_data["rights"][0]["rights"] == "Rights" + assert editor_response_data["subjects"][0]["subject"] == "Subject" + assert editor_response_data["about"]["language"] == "English" + + assert editor_response_data["about"]["resource_type"] == "Editor Resource Type" + assert editor_response_data["about"]["size"] == ["Size"] + assert editor_response_data["access"]["type"] == "editor type" + assert editor_response_data["access"]["description"] == "editor description" + assert editor_response_data["consent"]["noncommercial"] is True + assert editor_response_data["consent"]["geog_restrict"] is True + assert editor_response_data["consent"]["research_type"] is True + assert editor_response_data["de_identification"]["direct"] is True + assert editor_response_data["de_identification"]["type"] == "Level" + assert editor_response_data["publisher"]["publisher"] == "Publisher" + assert ( + editor_response_data["publisher"]["managing_organization_name"] + == "Managing Editor Organization Name" + ) + + assert 
editor_response_data["identifiers"][0]["identifier"] == "identifier test" + assert editor_response_data["identifiers"][0]["type"] == "ARK" + assert editor_response_data["related_items"][0]["publication_year"] == "1970" + assert editor_response_data["related_items"][0]["publisher"] == "Publisher" + assert ( + editor_response_data["related_items"][0]["contributors"][0]["name"] + == "Ndafsdame" + ) + assert ( + editor_response_data["related_items"][0]["contributors"][0]["contributor_type"] + == "Con Type" + ) + assert editor_response_data["related_items"][0]["creators"][0]["name"] == "Name" + assert ( + editor_response_data["related_items"][0]["creators"][0]["name_type"] + == "Personal" + ) + assert ( + editor_response_data["related_items"][0]["titles"][e_main_title_0]["title"] + == "Title" + ) + assert ( + editor_response_data["related_items"][0]["titles"][e_main_title_0]["type"] + == "MainTitle" + ) + assert ( + editor_response_data["related_items"][0]["titles"][e_sub_title_0]["title"] + == "Title" + ) + assert ( + editor_response_data["related_items"][0]["titles"][e_sub_title_0]["type"] + == "Subtitle" + ) + assert ( + editor_response_data["related_items"][0]["identifiers"][0]["identifier"] + == "Identifier" + ) + assert editor_response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" + assert editor_response_data["related_items"][0]["type"] == "Type" + -def test_get_version_readme(_logged_in_client): +def test_get_version_readme(clients): """ Given a Flask application configured for testing - WHEN the '/study//dataset//version//readme' endpoint is requested (GET) + WHEN the '/study//dataset//version//readme' + endpoint is requested (GET) THEN check that the response is valid and retrieves the design metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore 
@@ -404,17 +1032,38 @@ def test_get_version_readme(_logged_in_client): response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme" ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme" + ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + + assert response_data["readme"] == "" + assert admin_response_data["readme"] == "" + assert editor_response_data["readme"] == "" -def test_put_version_readme(_logged_in_client): +def test_put_version_readme(clients): """ Given a Flask application configured for testing - WHEN the '/study//dataset//version//readme' endpoint is requested (PUT) + WHEN the '/study//dataset//version//readme' + endpoint is requested (PUT) THEN check that the response is valid and retrieves the design metadata """ # create a new dataset and delete it afterwards + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore @@ -423,19 +1072,41 @@ def test_put_version_readme(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme", json={"readme": "readme test"}, ) + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme", + json={"readme": "readme test"}, + ) + editor_response = 
_editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme", + json={"readme": "readme test"}, + ) + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/readme", + json={"readme": "readme test"}, + ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + assert response_data["readme"] == "readme test" + assert admin_response_data["readme"] == "readme test" + assert editor_response_data["readme"] == "readme test" -def test_put_version_changelog(_logged_in_client): +def test_put_version_changelog(clients): """ Given a Flask application configured for testing - WHEN the '/study//dataset//version//changelog' endpoint is requested (PUT) + WHEN the '/study//dataset//version//changelog' + endpoint is requested (PUT) THEN check that the response is valid and retrieves the design metadata """ # create a new dataset and delete it afterwards + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore @@ -443,18 +1114,40 @@ def test_put_version_changelog(_logged_in_client): f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog", json={"changelog": "changelog test"}, ) + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog", + json={"changelog": "changelog test"}, + ) + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog", + json={"changelog": "changelog test"}, + ) + viewer_response = _viewer_client.put( + 
f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog", + json={"changelog": "changelog test"}, + ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 response_data = json.loads(response.data) - assert response_data == "changelog test" + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + assert response_data["changelog"] == "changelog test" + assert admin_response_data["changelog"] == "changelog test" + assert editor_response_data["changelog"] == "changelog test" -def test_get_version_changelog(_logged_in_client): + +def test_get_version_changelog(clients): """ Given a Flask application configured for testing - WHEN the '/study//dataset//version//changelog' endpoint is requested (GET) + WHEN the '/study//dataset//version//changelog' + endpoint is requested (GET) THEN check that the response is valid and retrieves the design metadata """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore @@ -462,5 +1155,51 @@ def test_get_version_changelog(_logged_in_client): response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog" ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/changelog" + ) assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 403 + 
response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + + assert response_data["changelog"] == "changelog test" + assert admin_response_data["changelog"] == "changelog test" + assert editor_response_data["changelog"] == "changelog test" + + +def test_delete_dataset_version(clients): + """ + Given a Flask application configured for testing, study ID, dataset ID and version ID + When the '/study/{study_id}/dataset/{dataset_id}/version/{version_id}' + is requested (DELETE) + Then check that the response is valid and deletes the dataset version + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] + dataset_id = pytest.global_dataset_id + version_id = pytest.global_dataset_version_id + + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}" + ) + + assert viewer_response.status_code == 403 + assert editor_response.status_code == 403 + assert response.status_code == 204 diff --git a/tests/unit/test_study_models.py b/tests/unit/test_study_models.py index 6d9b249e..9bcc5489 100644 --- a/tests/unit/test_study_models.py +++ b/tests/unit/test_study_models.py @@ -1,25 +1,25 @@ -"""Tests for the Study model""" -import uuid +# """Tests for the Study model""" +# import uuid -from model.study import Study +# from model.study import Study -def test_new_study(): - """ - GIVEN a Study model - WHEN a new Study is created - THEN check the name, description, and owner fields are defined correctly - """ - study = Study.from_data( - { - "title": "Study1", - "image": "https://api.dicebear.com/6.x/adventurer/svg", - "last_updated": "2021-01-01", - } - ) 
+# def test_new_study(): +# """ +# GIVEN a Study model +# WHEN a new Study is created +# THEN check the name, description, and owner fields are defined correctly +# """ +# study = Study.from_data( +# { +# "title": "Study1", +# "image": "https://api.dicebear.com/6.x/adventurer/svg", +# "last_updated": "2021-01-01", +# } +# ) - assert study.title == "Study1" - assert uuid.UUID(study.id) - assert study.image == "https://api.dicebear.com/6.x/adventurer/svg" +# assert study.title == "Study1" +# assert uuid.UUID(study.id) +# assert study.image == "https://api.dicebear.com/6.x/adventurer/svg" - # assert study.owner.affiliations == "affiliations1" +# # assert study.owner.affiliations == "affiliations1" From b041334d86b75d12d2714315c5bd3bd7ec5ed351 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 5 Dec 2023 11:41:09 -0800 Subject: [PATCH 378/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20black=20issue=20w?= =?UTF-8?q?ith=20line=20length?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 36 ++++++++++++++++++------------------ apis/redcap.py | 41 ++++++++++++++++++++--------------------- 2 files changed, 38 insertions(+), 39 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 19e61b2d..5a7f2ffe 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -266,26 +266,26 @@ def post(self, study_id: int): return e.message, 400 if len(data["project_id"]) < 1: return ( - f"redcap project_id is required to connect a dashboard: \ - {data['project_id']}", + f"""redcap project_id is required to connect a dashboard: + {data['project_id']}""", 400, ) if len(data["reports"]) < 1: return ( - f"redcap reports are required to connect a dashboard: \ - {data['reports']}", + f"""redcap reports are required to connect a dashboard: + {data['reports']}""", 400, ) if len(data["dashboard_name"]) < 1: return ( - f"dashboard dashboard_name is required to connect a dashboard: \ - {data['dashboard_name']}", + f"""dashboard 
dashboard_name is required to connect a dashboard: + {data['dashboard_name']}""", 400, ) if len(data["dashboard_modules"]) < 1: return ( - f"dashboard dashboard_modules is required to connect a dashboard: \ - {data['dashboard_name']}", + f"""dashboard dashboard_modules is required to connect a dashboard: + {data['dashboard_name']}""", 400, ) connect_redcap_project_dashboard_data = ( @@ -453,32 +453,32 @@ def put(self, study_id: int): return e.message, 400 if len(data["project_id"]) < 1: return ( - f"redcap project_id is required to connect a dashboard: \ - {data['project_id']}", + f"""redcap project_id is required to connect a dashboard: + {data['project_id']}""", 400, ) if len(data["reports"]) < 1: return ( - f"redcap reports are required to connect a dashboard: \ - {data['reports']}", + f"""redcap reports are required to connect a dashboard: + {data['reports']}""", 400, ) if len(data["dashboard_id"]) < 1: return ( - f"dashboard dashboard_id is required to connect a dashboard: \ - {data['dashboard_id']}", + f"""dashboard dashboard_id is required to connect a dashboard: + {data['dashboard_id']}""", 400, ) if len(data["dashboard_name"]) < 1: return ( - f"dashboard dashboard_name is required to connect a dashboard: \ - {data['dashboard_name']}", + f"""dashboard dashboard_name is required to connect a dashboard: + {data['dashboard_name']}""", 400, ) if len(data["dashboard_modules"]) < 1: return ( - f"dashboard dashboard_modules is required to connect a dashboard: \ - {data['dashboard_name']}", + f"""dashboard dashboard_modules is required to connect a dashboard: + {data['dashboard_name']}""", 400, ) # Clear Redis Cache diff --git a/apis/redcap.py b/apis/redcap.py index 89adbd89..b14d36cf 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -116,32 +116,32 @@ def post(self, study_id: int): if len(data["project_title"]) < 1: return ( - f"redcap project_title is required for redcap access: \ - {data['project_title']}", + f"""redcap project_title is required for redcap 
access: + {data['project_title']}""", 400, ) if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap access: \ - {data['project_id']}", + f"""redcap project_id is required for redcap access: + {data['project_id']}""", 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: \ - {data['project_api_url']}", + f"""redcap project_api_url is required for redcap access: + {data['project_api_url']}""", 400, ) if len(data["project_api_key"]) < 1: return ( - f"redcap project_api_key is required for redcap access: \ - {data['project_api_key']}", + f"""redcap project_api_key is required for redcap access: + {data['project_api_key']}""", 400, ) - if isinstance(data["project_api_active"], bool): + if not isinstance(data["project_api_active"], bool): return ( - f"redcap project_api_active is required for redcap access: \ - {data['project_api_active']}", + f"""redcap project_api_active is required for redcap access: + {data['project_api_active']}""", 400, ) @@ -207,29 +207,28 @@ def put(self, study_id: int): if len(data["project_id"]) < 1: return ( - f"redcap project_id is required for redcap access: \ - {data['project_id']}", + f"""redcap project_id is required for redcap access: + {data['project_id']}""", 400, ) if len(data["project_title"]) < 1: return ( - f"redcap project_title is required for redcap access: \ - {data['project_title']}", + f"""redcap project_title is required for redcap access: + {data['project_title']}""", 400, ) if len(data["project_api_url"]) < 1: return ( - f"redcap project_api_url is required for redcap access: \ - {data['project_api_url']}", + f"""redcap project_api_url is required for redcap access: + {data['project_api_url']}""", 400, ) - if isinstance(data["project_api_active"], bool): + if not isinstance(data["project_api_active"], bool): return ( - f"redcap project_api_active is required for redcap access: \ - {data['project_api_active']}", + f"""redcap 
project_api_active is required for redcap access: + {data['project_api_active']}""", 400, ) - update_redcap_project_view = model.StudyRedcapProjectApi.query.get( data["project_id"] ) From 449797a3e923821fc7739114ea3545774c308dc3 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 11 Dec 2023 17:13:02 -0800 Subject: [PATCH 379/505] =?UTF-8?q?=E2=9C=A8=20feat:=20updates=20for=20new?= =?UTF-8?q?=20chart=20types/modules?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 3 +- modules/etl/config/__init__.py | 13 +- modules/etl/config/aireadi_config.py | 322 ++++++++------------- modules/etl/transforms/redcap_transform.py | 19 +- 4 files changed, 151 insertions(+), 206 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 5a7f2ffe..5ad32484 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -262,7 +262,7 @@ def post(self, study_id: int): try: validate(request.json, schema) except ValidationError as e: - print(e) + print("validation error") return e.message, 400 if len(data["project_id"]) < 1: return ( @@ -368,6 +368,7 @@ def get(self, study_id: int): } | transformConfigs["redcap"] mergedTransform = RedcapTransform(redcap_etl_config).merged + # Execute Dashboard Module Transforms for dashboard_module in redcap_project_dashboard["dashboard_modules"]: transform, module_etl_config = transformConfigs[dashboard_module["id"]] diff --git a/modules/etl/config/__init__.py b/modules/etl/config/__init__.py index c863185c..0ce9700f 100644 --- a/modules/etl/config/__init__.py +++ b/modules/etl/config/__init__.py @@ -1,8 +1,11 @@ from .aireadi_config import ( redcapTransformConfig, - sexGenderTransformConfig, - raceEthnicityTransformConfig, - phenotypesTransformConfig, - overviewTransformConfig, - transformConfigs, + instrumentCompletionStatusBySiteTransformConfig, + phenotypeRecruitmentBySiteTransformConfig, + raceRecruitmentBySiteTransformConfig, + raceSexBySiteTransformConfig, + 
phenotypeRaceBySexTransformConfig, + phenotypeSexBySiteTransformConfig, + currentMedicationsBySiteTransformConfig, ) +from .aireadi_config import transformConfigs diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 3485b12a..633d9e82 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -53,7 +53,7 @@ ] computed_columns: List = [ - "phenotype", + "phenotypes", "treatments", ] @@ -118,7 +118,7 @@ "0": "Incomplete", } -phenotype_column_map: Dict[str, str] = { +phenotypes_column_map: Dict[str, str] = { "mhterm_dm2": "Type II Diabetes", "mhterm_predm": "Prediabetes", "mh_a1c": "Elevated A1C", @@ -127,7 +127,7 @@ treatments_column_map: Dict[str, str] = { "cmtrt_a1c": "Oral Medication", "cmtrt_glcs": "Non-Insulin Injectable", - "cmtrt_insln": "Insuling Injectable", + "cmtrt_insln": "Insulin Injectable", "cmtrt_lfst": "Lifestyle Management", } @@ -148,8 +148,10 @@ ( "new_column_from_binary_columns_positive_class", { - "column_name_map": phenotype_column_map, - "new_column_name": "phenotype", + "column_name_map": phenotypes_column_map, + "new_column_name": "phenotypes", + "all_negative_value": "Control", + "default_value": missing_value_generic, }, ), ( @@ -157,6 +159,8 @@ { "column_name_map": treatments_column_map, "new_column_name": "treatments", + "all_negative_value": "No Treatments", + "default_value": missing_value_generic, }, ), ( @@ -218,10 +222,10 @@ # # Overview -overviewTransformConfig: Tuple[str, Dict[str, Any]] = ( +instrumentCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "compoundTransform", { - "key": "overview", + "key": "instrument-completion-status-by-site", "strict": True, "transforms": [ { @@ -1778,14 +1782,14 @@ ) # Recruitment Counts by Site -recruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( +raceRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "recruitment", + "key": 
"race-recruitment-by-site", "strict": True, "transforms": [ { - "name": "Recruitment", + "name": "Race Recruitment", "vtype": "DoubleDiscrete", "methods": [ { @@ -1799,7 +1803,7 @@ "name": "Site", "field": "siteid", "missing_value": missing_value_generic, - "astyoe": str, + "astype": str, }, "subgroup": { "name": "Race", @@ -1836,19 +1840,19 @@ }, ) -# Race & Ethnicity Counts by Site -raceEthnicityTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Recruitment Counts by Site +phenotypeRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "race-ethnicity", + "key": "phenotype-recruitment-by-site", "strict": True, "transforms": [ { - "name": "Race & Ethnicity", - "vtype": "DoubleCategorical", + "name": "Phenotype Recruitment", + "vtype": "DoubleDiscrete", "methods": [ { - "groups": ["siteid", "race", "ethnic"], + "groups": ["siteid", "phenotypes", "scrcmpdat"], "value": "record_id", "func": "count", } @@ -1860,21 +1864,79 @@ "missing_value": missing_value_generic, "astype": str, }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "color": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "scrcmpdat", + "missing_value": missing_value_generic, + "astype": int, + "remap": lambda x: datetime.strptime( + x["record"][x["accessors"]["x"]["field"]], "%Y-%m-%d" + ) + .isocalendar() + .week, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race & Sex Counts by Race +raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-sex-by-site", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "race", 
"siteid"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, "group": { - "name": "Race", - "field": "race", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Ethnicity", - "field": "ethnic", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, "color": { - "name": "Ethnicity", - "field": "ethnic", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, @@ -1890,19 +1952,19 @@ }, ) -# Sex & Gender Counts by Site -sexGenderTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Phenotype & Sex Counts by Race +phenotypeSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "sex-gender", + "key": "phenotype-sex-by-site", "strict": True, "transforms": [ { - "name": "Sex & Gender", + "name": "Phenotype & Sex by Site", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "scrsex", "genderid"], + "groups": ["scrsex", "phenotypes", "siteid"], "value": "record_id", "func": "count", } @@ -1914,20 +1976,20 @@ "missing_value": missing_value_generic, }, "group": { - "name": "Gender", - "field": "genderid", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Sex", - "field": "scrsex", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, "color": { - "name": "Sex", - "field": "scrsex", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, @@ -1943,179 +2005,44 @@ }, ) -# # Phenotypes -# phenotypesTransformConfig: Tuple[str, Dict[str, Any]] = ( -# "compoundTransform", -# { -# "key": "phenotype", -# "strict": True, -# "transforms": [ -# { -# "name": "Prediabetes", -# "vtype": "SingleCategorical", -# 
"methods": [ -# { -# "groups": ["siteid", "mhterm_predm"], -# "value": "record_id", -# "func": "count", -# } -# ], -# "accessors": { -# "filterby": { -# "name": "Site", -# "field": "siteid", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "group": { -# "name": "Prediabetes", -# "field": "mhterm_predm", -# "remap": lambda x: "Prediabetes" -# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" -# else "No", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "color": { -# "name": "Prediabetes", -# "field": "mhterm_predm", -# "remap": lambda x: "Prediabetes" -# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" -# else "No", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "value": { -# "name": "Count (N)", -# "field": "record_id", -# "missing_value": missing_value_generic, -# "astype": int, -# }, -# }, -# }, -# { -# "name": "Type I Diabetes", -# "vtype": "SingleCategorical", -# "methods": [ -# { -# "groups": ["siteid", "mhterm_dm1"], -# "value": "record_id", -# "func": "count", -# } -# ], -# "accessors": { -# "filterby": { -# "name": "Site", -# "field": "siteid", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "group": { -# "name": "Type I Diabetes", -# "field": "mhterm_dm1", -# "remap": lambda x: "Type I Diabetes" -# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" -# else "No", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "color": { -# "name": "Type I Diabetes", -# "field": "mhterm_dm1", -# "remap": lambda x: "Type I Diabetes" -# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" -# else "No", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "value": { -# "name": "Count (N)", -# "field": "record_id", -# "missing_value": missing_value_generic, -# "astype": int, -# }, -# }, -# }, -# { -# "name": "Type II Diabetes", -# "vtype": "SingleCategorical", -# "methods": [ -# { -# "groups": ["siteid", 
"mhterm_dm2"], -# "value": "record_id", -# "func": "count", -# } -# ], -# "accessors": { -# "filterby": { -# "name": "Site", -# "field": "siteid", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "group": { -# "name": "Type II Diabetes", -# "field": "mhterm_dm2", -# "remap": lambda x: "Type II Diabetes" -# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" -# else "No", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "color": { -# "name": "Type II Diabetes", -# "field": "mhterm_dm2", -# "remap": lambda x: "Type II Diabetes" -# if str(x["record"][x["accessors"]["group"]["field"]]) == "Yes" -# else "No", -# "missing_value": missing_value_generic, -# "astype": str, -# }, -# "value": { -# "name": "Count (N)", -# "field": "record_id", -# "missing_value": missing_value_generic, -# "astype": int, -# }, -# }, -# }, -# ], -# }, -# ) - -# Phenotypes -phenotypesTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", +# Phenotype & Race Counts by Sex +phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", { - "key": "phenotype", + "key": "phenotype-race-by-sex", "strict": True, "transforms": [ { - "name": "Phenotype", - "vtype": "SingleCategorical", + "name": "Phenotype & Race by Sex", + "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "phenotype"], + "groups": ["phenotypes", "race", "scrsex"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, - "astype": str, }, "group": { "name": "Phenotype", - "field": "phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, "color": { - "name": "Phenotype", - "field": "mhterm_predm", + "name": "Race", + "field": "race", 
"missing_value": missing_value_generic, "astype": str, }, @@ -2126,19 +2053,19 @@ "astype": int, }, }, - } + }, ], }, ) -currentMedicationsTransformConfig: Tuple[str, Dict[str, Any]] = ( +currentMedicationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "current-medications", + "key": "current-medications-by-site", "strict": True, "transforms": [ { - "name": "Current Medications", + "name": "Current Medications by Site", "vtype": "SingleCategorical", "methods": [ { @@ -2181,10 +2108,11 @@ transformConfigs: Dict[str, Any] = { "redcap": redcapTransformConfig, - "overview": overviewTransformConfig, - "recruitment": recruitmentTransformConfig, - "race-ethnicity": raceEthnicityTransformConfig, - "sex-gender": sexGenderTransformConfig, - "phenotypes": phenotypesTransformConfig, - "current-medications": currentMedicationsTransformConfig, + "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, + "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, + "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, + "race-sex-by-site": raceSexBySiteTransformConfig, + "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, + "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, + "current-medications-by-site": currentMedicationsBySiteTransformConfig, } diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 90a5d04d..f2525622 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -565,17 +565,27 @@ def _new_column_from_binary_columns_positive_class( df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", + all_negative_value: str = "", + default_value: str | None = "Value Unavailable", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: + new_column_name = ( new_column_name if len(new_column_name) > 0 else 
"_".join(column_name_map.keys()) ) - df[new_column_name] = ( - df[list(column_name_map.keys())].idxmax(axis=1).map(column_name_map) - ) + df[new_column_name] = "" + for column_name, column_value in column_name_map.items(): + df.loc[df[column_name] == "Yes", new_column_name] += f"{column_value}{self.multivalue_separator}" + for column_name, column_value in column_name_map.items(): + df.loc[(df[column_name] == default_value) & (df[new_column_name] == ""), new_column_name] = default_value + df.loc[df[new_column_name] == "", new_column_name] = all_negative_value + # Remove delimiter character if column ends with it + rgx = f"\\{self.multivalue_separator}$" + df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex = True) + return df def new_column_from_binary_columns_positive_class( @@ -583,6 +593,8 @@ def new_column_from_binary_columns_positive_class( df: pd.DataFrame, column_name_map: dict, new_column_name: str = "", + all_negative_value: str = "", + default_value: str | None = "Value Unavailable", dtype: Callable = float, ) -> pd.DataFrame: """ @@ -595,6 +607,7 @@ def new_column_from_binary_columns_positive_class( df=df, column_name_map=column_name_map, new_column_name=new_column_name, + default_value=default_value, dtype=dtype, ) From 91fbb9d2f9bce7a63b9663ecfe78294ccc8bfcef Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 12 Dec 2023 01:13:28 +0000 Subject: [PATCH 380/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/transforms/redcap_transform.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index f2525622..ec14a2ee 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -570,7 +570,6 @@ def 
_new_column_from_binary_columns_positive_class( dtype: Callable = float, annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - new_column_name = ( new_column_name if len(new_column_name) > 0 @@ -578,13 +577,18 @@ def _new_column_from_binary_columns_positive_class( ) df[new_column_name] = "" for column_name, column_value in column_name_map.items(): - df.loc[df[column_name] == "Yes", new_column_name] += f"{column_value}{self.multivalue_separator}" + df.loc[ + df[column_name] == "Yes", new_column_name + ] += f"{column_value}{self.multivalue_separator}" for column_name, column_value in column_name_map.items(): - df.loc[(df[column_name] == default_value) & (df[new_column_name] == ""), new_column_name] = default_value + df.loc[ + (df[column_name] == default_value) & (df[new_column_name] == ""), + new_column_name, + ] = default_value df.loc[df[new_column_name] == "", new_column_name] = all_negative_value # Remove delimiter character if column ends with it rgx = f"\\{self.multivalue_separator}$" - df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex = True) + df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex=True) return df From 676a38263b3c7ef5b0d452d87eaa4b5829f72969 Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Tue, 12 Dec 2023 02:28:04 -0800 Subject: [PATCH 381/505] =?UTF-8?q?refactor:=20=E2=99=BB=EF=B8=8F=20api=20?= =?UTF-8?q?responses=20updated=20for=20swagger=20docs=20(#32)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :recycle: refactor: update api.docs on 201 success responses * style: 🎨 fix code style issues with Black * :recycle: refactor: update PUT contributor owner response changed to 200 * :reycle: refactor: correct api.response success statuses + add api.response for non existing requests --------- Co-authored-by: Lint Action --- apis/authentication.py | 4 +-- apis/contributor.py | 6 ++-- apis/dataset.py | 28 
+++++++++---------- .../dataset_alternate_identifier.py | 2 +- apis/dataset_metadata/dataset_contributor.py | 4 +-- apis/dataset_metadata/dataset_date.py | 2 +- apis/dataset_metadata/dataset_description.py | 2 +- apis/dataset_metadata/dataset_funder.py | 2 +- apis/dataset_metadata/dataset_other.py | 4 +-- apis/dataset_metadata/dataset_related_item.py | 2 +- apis/dataset_metadata/dataset_rights.py | 2 +- apis/dataset_metadata/dataset_subject.py | 2 +- apis/dataset_metadata/dataset_title.py | 2 +- apis/participant.py | 6 ++-- apis/study.py | 2 +- apis/study_metadata/study_arm.py | 2 ++ apis/study_metadata/study_available_ipd.py | 2 +- apis/study_metadata/study_contact.py | 2 ++ apis/study_metadata/study_description.py | 2 ++ apis/study_metadata/study_design.py | 2 ++ apis/study_metadata/study_eligibility.py | 2 ++ apis/study_metadata/study_identification.py | 2 +- apis/study_metadata/study_intervention.py | 2 ++ apis/study_metadata/study_ipdsharing.py | 2 ++ apis/study_metadata/study_link.py | 2 ++ apis/study_metadata/study_location.py | 2 ++ apis/study_metadata/study_other.py | 2 ++ apis/study_metadata/study_overall_official.py | 2 +- apis/study_metadata/study_reference.py | 2 ++ .../study_sponsors_collaborators.py | 2 ++ apis/study_metadata/study_status.py | 2 ++ apis/user.py | 4 ++- 32 files changed, 67 insertions(+), 39 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 20c5e990..dace3298 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -47,7 +47,7 @@ class UnauthenticatedException(Exception): class SignUpUser(Resource): """SignUpUser class is used to sign up new users to the system""" - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") # @api.marshal_with(signup_model) @api.expect(signup_model) @@ -371,7 +371,7 @@ def is_granted(permission: str, study=None): class Logout(Resource): """Logout class is used to log out users from the system""" - @api.response(200, 
"Success") + @api.response(204, "Success") @api.response(400, "Validation Error") def post(self): """simply logges out user from the system""" diff --git a/apis/contributor.py b/apis/contributor.py index 44b0bcc2..fb293a40 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -34,9 +34,9 @@ def get(self, study_id: int): contributors_list = [c.to_dict() for c in contributors] + [ c.to_dict() for c in invited_contributors ] - return contributors_list + return contributors_list, 200 - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): @@ -218,4 +218,4 @@ def put(self, study_id: int, user_id: int): existing_owner.permission = "admin" model.db.session.commit() - return Response(status=204) + return existing_contributor.to_dict(), 200 diff --git a/apis/dataset.py b/apis/dataset.py index e1b4a3e7..2f5a2a35 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -40,7 +40,7 @@ @api.route("/study//dataset") class DatasetList(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(dataset) @api.doc("view datasets") @@ -77,17 +77,17 @@ def post(self, study_id): # TODO not finalized endpoint. 
have to set functionality @api.route("/study//dataset/") -@api.response(201, "Success") -@api.response(400, "Validation Error") +# @api.response(201, "Success") +# @api.response(400, "Validation Error") @api.doc("view dataset") class DatasetResource(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument data_obj = model.Dataset.query.get(dataset_id) return data_obj.to_dict(), 200 - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("update dataset") def put(self, study_id: int, dataset_id: int): @@ -123,7 +123,7 @@ def delete(self, study_id: int, dataset_id: int): @api.route("/study//dataset//version/") class VersionResource(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("dataset version") def get( @@ -135,7 +135,7 @@ def get( dataset_version = model.Version.query.get(version_id) return dataset_version.to_dict(), 200 - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("update dataset version") def put( @@ -166,7 +166,7 @@ def delete( @api.route("/study//dataset//version") class VersionList(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("view versions") def get(self, study_id: int, dataset_id: int): @@ -212,7 +212,7 @@ def post(self, study_id: int, dataset_id: int): @api.route("/study//dataset//version//study-metadata") class VersionDatasetMetadataResource(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("version study metadata get") def get(self, study_id: str, dataset_id: str, version_id: str): @@ -229,7 +229,7 @@ def get(self, study_id: str, dataset_id: str, version_id: str): 
"/study//dataset//version//dataset-metadata" ) class VersionStudyMetadataResource(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("version dataset metadata get") def get(self, study_id: str, dataset_id: str, version_id: str): @@ -244,7 +244,7 @@ def get(self, study_id: str, dataset_id: str, version_id: str): @api.route("/study//dataset//version//changelog") class VersionDatasetChangelog(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("version changelog") def get(self, study_id: str, dataset_id: str, version_id: str): @@ -256,7 +256,7 @@ def get(self, study_id: str, dataset_id: str, version_id: str): ).one_or_none() return {"changelog": version.changelog}, 200 - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("version changelog update") def put( @@ -274,7 +274,7 @@ def put( @api.route("/study//dataset//version//readme") class VersionDatasetReadme(Resource): - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("version readme") def get(self, study_id: str, dataset_id: str, version_id: str): @@ -286,7 +286,7 @@ def get(self, study_id: str, dataset_id: str, version_id: str): ).one_or_none() return version.version_readme.to_dict(), 200 - @api.response(201, "Success") + @api.response(200, "Success") @api.response(400, "Validation Error") @api.doc("version readme update") def put( diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 2971e583..ffd6ad0d 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -35,7 +35,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argum return [d.to_dict() for d in dataset_identifier_], 200 
@api.doc("update identifier") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset alternate identifier""" diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py index 7fb9cd08..25623f04 100644 --- a/apis/dataset_metadata/dataset_contributor.py +++ b/apis/dataset_metadata/dataset_contributor.py @@ -33,7 +33,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume ], 200 @api.doc("update contributor") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset contributor""" @@ -177,7 +177,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"]], 200 @api.doc("update creator") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset creator""" diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 57916514..37bf73d4 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -35,7 +35,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_date_], 200 @api.doc("update date") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset date""" diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 68ffdd9f..9183d624 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -35,7 +35,7 @@ def get(self, 
study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_description_], 200 @api.doc("update description") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset description""" diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 0b03e14f..2799e58c 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -39,7 +39,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_funder_], 200 @api.doc("update funder") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument """Update dataset funder""" diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 633d4ade..85593068 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -33,7 +33,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset other metadata""" dataset_ = model.Dataset.query.get(dataset_id) dataset_other_ = dataset_.dataset_other - return dataset_other_.to_dict() + return dataset_other_.to_dict(), 200 @api.doc("other update") @api.response(200, "Success") @@ -78,7 +78,7 @@ def put(self, study_id: int, dataset_id: int): dataset_ = model.Dataset.query.get(dataset_id) dataset_.dataset_other.update(data) model.db.session.commit() - return dataset_.dataset_other.to_dict() + return dataset_.dataset_other.to_dict(), 200 @api.route("/study//dataset//metadata/publisher") diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index 2b66de23..ba47c185 100644 --- 
a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -34,7 +34,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_related_item_], 200 @api.doc("update related item") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset related item""" diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 024d50a5..ce5b1fea 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -38,7 +38,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_rights_], 200 @api.doc("update rights") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset rights""" diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index 2de1a38a..b4837211 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -39,7 +39,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_subject_], 200 @api.doc("update subject") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset subject""" diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index e9ebba7f..f91f8c97 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -35,7 +35,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume return [d.to_dict() for d in dataset_title_], 200 
@api.doc("update title") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int, dataset_id: int): """Update dataset title""" diff --git a/apis/participant.py b/apis/participant.py index c30d3769..799fe250 100644 --- a/apis/participant.py +++ b/apis/participant.py @@ -31,9 +31,9 @@ class AddParticipant(Resource): @api.marshal_with(participant_model) def get(self, study_id: int): # pylint: disable= unused-argument participants = model.Participant.query.all() - return [p.to_dict() for p in participants] + return [p.to_dict() for p in participants], 200 - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") # @api.marshal_with(participant_model) def post(self, study_id: int): @@ -60,7 +60,7 @@ def put(self, study_id: int, participant_id: int): update_participant = model.Participant.query.get(participant_id) update_participant.update(request.json) model.db.session.commit() - return update_participant.to_dict() + return update_participant.to_dict(), 200 @api.response(204, "Success") @api.response(400, "Validation Error") diff --git a/apis/study.py b/apis/study.py index 66c6d51d..199c00c5 100644 --- a/apis/study.py +++ b/apis/study.py @@ -54,7 +54,7 @@ def get(self): return [s.to_dict() for s in studies], 200 @api.expect(study_model) - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self): """Create a new study""" diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 5ffd2bf6..97336fa0 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -45,6 +45,8 @@ def get(self, study_id): return arm.to_dict(), 200 + @api.response(201, "Success") + @api.response(400, "Validation Error") def post(self, study_id): """Create study arm metadata""" # Schema validation diff --git a/apis/study_metadata/study_available_ipd.py 
b/apis/study_metadata/study_available_ipd.py index c7be0d58..04a918f4 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -45,7 +45,7 @@ def get(self, study_id: int): @api.doc( description="An array of objects are expected within the payload with the keys demonstrated below to create an available-ipd" # noqa E501 ) - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") @api.expect(study_available) def post(self, study_id: int): diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 45b6eeb3..4208c061 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -44,6 +44,8 @@ def get(self, study_id: int): return [s.to_dict() for s in sorted_study_contact if s.central_contact], 200 + @api.response(201, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int): """Create study contact metadata""" diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index cd2be85e..026626e6 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -34,6 +34,8 @@ def get(self, study_id: int): return study_description_.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): """Update study description metadata""" study_obj = model.Study.query.get(study_id) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 15dd68d1..37cd77df 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -51,6 +51,8 @@ def get(self, study_id: int): return study_design_.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): """Update study design metadata""" # Schema validation diff --git 
a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 681318e8..7bd1771c 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -44,6 +44,8 @@ def get(self, study_id: int): return study_.study_eligibility.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): """Update study eligibility metadata""" # Schema validation diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 806d1041..48eea1e5 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -39,7 +39,7 @@ def get(self, study_id: int): return identifiers.to_dict(), 200 @api.doc("identification add") - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") @api.expect(study_identification) def post(self, study_id: int): diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 32070070..e9c43dcf 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -44,6 +44,8 @@ def get(self, study_id: int): return [s.to_dict() for s in sorted_study_intervention], 200 + @api.response(201, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int): """Create study intervention metadata""" # Schema validation diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index 97cb9921..ac49e5f5 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -39,6 +39,8 @@ def get(self, study_id: int): return study_.study_ipdsharing.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): """Create study ipdsharing metadata""" # Schema validation diff --git 
a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index e8b72b98..e89ee64f 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -36,6 +36,8 @@ def get(self, study_id: int): sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at) return [s.to_dict() for s in sorted_study_link_], 200 + @api.response(201, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int): """Create study link metadata""" # Schema validation diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 9296f108..c2194041 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -43,6 +43,8 @@ def get(self, study_id: int): return [s.to_dict() for s in sorted_study_location], 200 + @api.response(201, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int): """Create study location metadata""" # Schema validation diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 07d27b2b..d9ecd765 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -82,6 +82,8 @@ def get(self, study_id: int): return study_other_conditions, 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): """Update study conditions metadata""" # Schema validation diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 2a210730..14ecc34e 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -44,7 +44,7 @@ def get(self, study_id: int): return [i.to_dict() for i in sorted_study_overall], 200 - @api.response(200, "Success") + @api.response(201, "Success") @api.response(400, "Validation Error") def post(self, study_id: int): """Create study overall official metadata""" diff --git 
a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index af179b16..b5f4c028 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -41,6 +41,8 @@ def get(self, study_id: int): return [s.to_dict() for s in sorted_study_reference], 200 + @api.response(201, "Success") + @api.response(400, "Validation Error") def post(self, study_id: int): """Create study reference metadata""" # Schema validation diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index da1dba0b..89ab8f2b 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -47,6 +47,8 @@ def get(self, study_id: int): return study_sponsors_collaborators_.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): """Update study sponsors metadata""" # Schema validation diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 84cbe9d3..cd8a9b96 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -41,6 +41,8 @@ def get(self, study_id: int): return study_status_.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") def put(self, study_id: int): """Update study status metadata""" # Schema validation diff --git a/apis/user.py b/apis/user.py index f4458cbf..04a2412d 100644 --- a/apis/user.py +++ b/apis/user.py @@ -41,9 +41,11 @@ def get(self): user_information = user.to_dict() # combine user and user_details to return a single object user_information.update(user_details.to_dict()) - return user_information + return user_information, 200 @api.expect(study_model) + @api.response(200, "Success") + @api.response(400, "Validation Error") # @api.marshal_with(study_model) def put(self): """Updates user details""" From 
396502a00fe0f4783820eb0e214db8d547122dab Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Tue, 12 Dec 2023 02:28:41 -0800 Subject: [PATCH 382/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20updating=20user?= =?UTF-8?q?=20profile=20(#31)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :bug: fix: PUT user profile endpoint patched * :recycle: refactor: update user.py file * style: 🎨 fix code style issues with Black --------- Co-authored-by: Lint Action --- apis/user.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/apis/user.py b/apis/user.py index 04a2412d..68281ce7 100644 --- a/apis/user.py +++ b/apis/user.py @@ -64,6 +64,7 @@ def validate_is_valid_email(instance): schema = { "type": "object", "required": [ + "id", "email_address", "username", "first_name", @@ -75,20 +76,21 @@ def validate_is_valid_email(instance): ], "additionalProperties": False, "properties": { - "email_address": {"type": "string", "format": "valid email"}, - "username": {"type": "string", "minLength": 1}, - "first_name": {"type": "string", "minLength": 1}, - "last_name": {"type": "string", "minLength": 1}, - "institution": {"type": "string", "minLength": 1}, - "orcid": {"type": "string", "minLength": 1}, - "location": {"type": "string", "minLength": 1}, - "timezone": {"type": "string", "minLength": 1}, - "profile_image": {"type": "string", "minLength": 1}, # optional + "id": {"type": "string"}, + "email_address": {"type": "string", "format": "valid_email"}, + "username": {"type": "string", "minLength": 0}, + "first_name": {"type": "string", "minLength": 0}, + "last_name": {"type": "string", "minLength": 0}, + "institution": {"type": "string", "minLength": 0}, + "orcid": {"type": "string", "minLength": 0}, + "location": {"type": "string", "minLength": 0}, + "timezone": {"type": "string", "minLength": 0}, + "profile_image": {"type": "string", "minLength": 0}, # optional }, 
} format_checker = FormatChecker() - format_checker.checks("valid email")(validate_is_valid_email) + format_checker.checks("valid_email")(validate_is_valid_email) try: validate( From 26c0ed4d97a48eb0dccd125d8e8db733457294a6 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 12 Dec 2023 16:00:12 -0800 Subject: [PATCH 383/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 8 ++-- modules/etl/config/aireadi_config.py | 48 ++++++++++++++-------- modules/etl/transforms/redcap_transform.py | 30 +++++++++++++- 3 files changed, 64 insertions(+), 22 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 5ad32484..ed54f43a 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -30,7 +30,7 @@ "value": fields.Integer( required=False, readonly=True, description="Value field" ), - "x": fields.Float(required=False, readonly=True, description="X-axis field"), + "x": fields.Raw(required=False, readonly=True, description="X-axis field"), "y": fields.Float(required=False, readonly=True, description="Y-axis field"), "datetime": fields.String( required=False, readonly=True, description="Date field" @@ -367,10 +367,12 @@ def get(self, study_id: int): "redcap_api_key": redcap_project_view["project_api_key"], } | transformConfigs["redcap"] - mergedTransform = RedcapTransform(redcap_etl_config).merged + redcapTransform = RedcapTransform(redcap_etl_config) + mergedTransform = redcapTransform.merged # Execute Dashboard Module Transforms for dashboard_module in redcap_project_dashboard["dashboard_modules"]: + print(dashboard_module) transform, module_etl_config = transformConfigs[dashboard_module["id"]] transformed = getattr(ModuleTransform(module_etl_config), transform)( mergedTransform @@ -483,7 +485,7 @@ def put(self, study_id: int): 400, ) # Clear Redis Cache - # TODO: We want to clear the cache by dashboard_id, not the whole cache! 
+ # TODO: We want to clear the cache by dashboard_id/cache key, not the whole cache! cache.clear() update_redcap_project_dashboard_query = ( model.StudyRedcapProjectDashboard.query.get(data["dashboard_id"]) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 633d9e82..751f7541 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -55,6 +55,8 @@ computed_columns: List = [ "phenotypes", "treatments", + "scrweek", + "scryear", ] # Survey Column Groups @@ -145,6 +147,26 @@ "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), ("map_missing_values_by_columns", {"columns": data_columns}), + ( + "transform_values_by_column", + { + "column": "scrcmpdat", + "new_column_name": "scrweek", + # ISO 8601 string format token for front-end: %V + "transform": lambda x: int(datetime.strptime(x, "%Y-%m-%d").isocalendar().week), + "missing_value": missing_value_generic, + } + ), + ( + "transform_values_by_column", + { + "column": "scrcmpdat", + "new_column_name": "scryear", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: int(datetime.strptime(x, "%Y-%m-%d").isocalendar().year), + "missing_value": missing_value_generic, + } + ), ( "new_column_from_binary_columns_positive_class", { @@ -1789,11 +1811,11 @@ "strict": True, "transforms": [ { - "name": "Race Recruitment", + "name": "Race Recruitment by Site", "vtype": "DoubleDiscrete", "methods": [ { - "groups": ["siteid", "race", "scrcmpdat"], + "groups": ["siteid", "race", "scrweek"], "value": "record_id", "func": "count", } @@ -1805,7 +1827,7 @@ "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { + "group": { "name": "Race", "field": "race", "missing_value": missing_value_generic, @@ -1819,14 +1841,9 @@ }, "x": { "name": "Week of the Year", - "field": "scrcmpdat", + "field": "scrweek", "missing_value": missing_value_generic, "astype": int, - "remap": lambda x: datetime.strptime( - 
x["record"][x["accessors"]["x"]["field"]], "%Y-%m-%d" - ) - .isocalendar() - .week, }, "y": { "name": "Cumulative Count (N)", @@ -1848,11 +1865,11 @@ "strict": True, "transforms": [ { - "name": "Phenotype Recruitment", + "name": "Phenotype Recruitment by Site", "vtype": "DoubleDiscrete", "methods": [ { - "groups": ["siteid", "phenotypes", "scrcmpdat"], + "groups": ["siteid", "phenotypes", "scrweek"], "value": "record_id", "func": "count", } @@ -1864,7 +1881,7 @@ "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { + "group": { "name": "Phenotype", "field": "phenotypes", "missing_value": missing_value_generic, @@ -1878,14 +1895,9 @@ }, "x": { "name": "Week of the Year", - "field": "scrcmpdat", + "field": "scrweek", "missing_value": missing_value_generic, "astype": int, - "remap": lambda x: datetime.strptime( - x["record"][x["accessors"]["x"]["field"]], "%Y-%m-%d" - ) - .isocalendar() - .week, }, "y": { "name": "Cumulative Count (N)", diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index f2525622..8eb478a1 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -62,7 +62,7 @@ def __init__(self, config: dict) -> None: else { "encoding": "utf-8", "filename": "REDCapETL.log", - "level": logging.DEBUG, + "level": logging.INFO, } ) @@ -446,6 +446,34 @@ def remap_values_by_columns( df=df, columns=columns, value_map=value_map ) + # + # Transform - Values By Column + # + + def _transform_values_by_column( + self, + df: pd.DataFrame, + column: str, + new_column_name: str, + transform: Callable, + missing_value: Any, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + df[new_column_name] = df[column][df[column] != missing_value].apply(transform) + df[new_column_name] = df[new_column_name].fillna(missing_value) + return df + + def transform_values_by_column( + self, df: pd.DataFrame, column: str, new_column_name: str, transform: Callable, 
missing_value: Any, + ) -> pd.DataFrame: + """ + Replace 0-length values or values with keys in + self.none_map with self.missing_value_generic. + """ + return self._transform_values_by_column( + df=df, column=column, new_column_name=new_column_name, transform=transform, missing_value=missing_value + ) + # # Transform - Map Missing Values By Columns # From 1e1f84970ab7438c84cabbf3203b46ef6f28299b Mon Sep 17 00:00:00 2001 From: Lint Action Date: Wed, 13 Dec 2023 00:01:22 +0000 Subject: [PATCH 384/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/config/aireadi_config.py | 12 ++++++++---- modules/etl/transforms/redcap_transform.py | 13 +++++++++++-- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 751f7541..09145264 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -153,9 +153,11 @@ "column": "scrcmpdat", "new_column_name": "scrweek", # ISO 8601 string format token for front-end: %V - "transform": lambda x: int(datetime.strptime(x, "%Y-%m-%d").isocalendar().week), + "transform": lambda x: int( + datetime.strptime(x, "%Y-%m-%d").isocalendar().week + ), "missing_value": missing_value_generic, - } + }, ), ( "transform_values_by_column", @@ -163,9 +165,11 @@ "column": "scrcmpdat", "new_column_name": "scryear", # ISO 8601 string format token for front-end: %Y - "transform": lambda x: int(datetime.strptime(x, "%Y-%m-%d").isocalendar().year), + "transform": lambda x: int( + datetime.strptime(x, "%Y-%m-%d").isocalendar().year + ), "missing_value": missing_value_generic, - } + }, ), ( "new_column_from_binary_columns_positive_class", diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 88827a79..9520ff6a 100644 --- 
a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -464,14 +464,23 @@ def _transform_values_by_column( return df def transform_values_by_column( - self, df: pd.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, + self, + df: pd.DataFrame, + column: str, + new_column_name: str, + transform: Callable, + missing_value: Any, ) -> pd.DataFrame: """ Replace 0-length values or values with keys in self.none_map with self.missing_value_generic. """ return self._transform_values_by_column( - df=df, column=column, new_column_name=new_column_name, transform=transform, missing_value=missing_value + df=df, + column=column, + new_column_name=new_column_name, + transform=transform, + missing_value=missing_value, ) # From f589b0c441f9bc70394951e650379620b3759fc1 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 19 Dec 2023 20:01:38 -0800 Subject: [PATCH 385/505] =?UTF-8?q?=F0=9F=9A=A8=20chore:=20run=20formatter?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/contributor.py | 2 +- apis/dataset.py | 2 +- apis/dataset_metadata/dataset_alternate_identifier.py | 2 +- apis/dataset_metadata/dataset_contributor.py | 2 +- apis/dataset_metadata/dataset_date.py | 2 +- apis/dataset_metadata/dataset_description.py | 2 +- apis/dataset_metadata/dataset_funder.py | 2 +- apis/dataset_metadata/dataset_related_item.py | 2 +- apis/dataset_metadata/dataset_rights.py | 2 +- apis/dataset_metadata/dataset_subject.py | 2 +- apis/dataset_metadata/dataset_title.py | 2 +- apis/study.py | 2 +- apis/study_metadata/study_arm.py | 2 +- apis/study_metadata/study_available_ipd.py | 2 +- apis/study_metadata/study_contact.py | 2 +- apis/study_metadata/study_identification.py | 2 +- apis/study_metadata/study_intervention.py | 2 +- apis/study_metadata/study_link.py | 2 +- apis/study_metadata/study_location.py | 2 +- apis/study_metadata/study_overall_official.py 
| 2 +- apis/study_metadata/study_reference.py | 2 +- tests/functional/test_study_dataset_metadata_api.py | 2 +- tests/functional/test_study_metadata_api.py | 2 +- 23 files changed, 23 insertions(+), 23 deletions(-) diff --git a/apis/contributor.py b/apis/contributor.py index fb293a40..9f282312 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,7 +1,7 @@ from collections import OrderedDict from typing import Any, Dict, List, Union -from flask import g, request, Response +from flask import Response, g, request from flask_restx import Namespace, Resource, fields import model diff --git a/apis/dataset.py b/apis/dataset.py index 2f5a2a35..0c32921c 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -1,6 +1,6 @@ import typing -from flask import request, Response +from flask import Response, request from flask_restx import Namespace, Resource, fields import model diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index ffd6ad0d..14a1c896 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,7 +1,7 @@ """API endpoints for dataset alternate identifier""" from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py index 25623f04..67458e06 100644 --- a/apis/dataset_metadata/dataset_contributor.py +++ b/apis/dataset_metadata/dataset_contributor.py @@ -1,7 +1,7 @@ """API for dataset contributor metadata""" from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py 
index 37bf73d4..96eb4652 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,7 +1,7 @@ """APIs for dataset date metadata""" from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py index 9183d624..ddd4c56a 100644 --- a/apis/dataset_metadata/dataset_description.py +++ b/apis/dataset_metadata/dataset_description.py @@ -2,7 +2,7 @@ from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index 2799e58c..b6a3dece 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,7 +1,7 @@ """API endpoints for dataset funder""" from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index ba47c185..cc526bf6 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,7 +1,7 @@ """API for dataset related item""" from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index ce5b1fea..64f172bf 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -2,7 
+2,7 @@ from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py index b4837211..6aa372cc 100644 --- a/apis/dataset_metadata/dataset_subject.py +++ b/apis/dataset_metadata/dataset_subject.py @@ -2,7 +2,7 @@ from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index f91f8c97..b22d02fa 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,7 +1,7 @@ """API for dataset title metadata""" from typing import Any, Union -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study.py b/apis/study.py index 199c00c5..931b0151 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,7 +1,7 @@ """APIs for study operations""" "" from typing import Any, Union -from flask import g, request, Response +from flask import Response, g, request from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index 97336fa0..fb37b2e8 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,7 +1,7 @@ """API routes for study arm metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_available_ipd.py 
b/apis/study_metadata/study_available_ipd.py index 04a918f4..711e5280 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,7 +1,7 @@ """API routes for study available ipd metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 4208c061..4293d95c 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -2,7 +2,7 @@ import typing from email_validator import EmailNotValidError, validate_email -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import FormatChecker, ValidationError, validate diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 48eea1e5..503e699f 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,7 +1,7 @@ """API routes for study identification metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index e9c43dcf..3accb513 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -1,7 +1,7 @@ """API routes for study intervention metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index e89ee64f..cd4d802a 100644 --- 
a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -1,7 +1,7 @@ """API routes for study link metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index c2194041..232f03b4 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -1,7 +1,7 @@ """API routes for study location metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 14ecc34e..52345980 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,7 +1,7 @@ """API routes for study overall official metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index b5f4c028..40a27f2b 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,7 +1,7 @@ """API routes for study reference metadata""" import typing -from flask import request, Response +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 55118abe..45c03b4e 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -1,8 +1,8 @@ # pylint: 
disable=too-many-lines """Tests for the Dataset's Metadata API endpoints""" import json - from time import sleep + import pytest diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index 9d8b2618..8cde58ac 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -1,8 +1,8 @@ # pylint: disable=too-many-lines """Tests for the Study Metadata API endpoints""" import json - from time import sleep + import pytest From ec272f1edfb959403f1839776a0186d70a891e36 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 20 Dec 2023 09:32:53 -0800 Subject: [PATCH 386/505] =?UTF-8?q?=E2=9C=A8=20feat:=20add=20support=20for?= =?UTF-8?q?=20feature=20flags=20(#36)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ✨ feat: add support for feature flags * 👷 ci: add growthbook to test workflow --- .env.example | 4 +- .github/workflows/test.yml | 1 + app.py | 21 +++++++-- config.py | 2 + poetry.lock | 95 ++++++++++++++++++++++++++++++++++++-- pyproject.toml | 42 ++++++++++------- 6 files changed, 140 insertions(+), 25 deletions(-) diff --git a/.env.example b/.env.example index 67ab406e..a5fa42a7 100644 --- a/.env.example +++ b/.env.example @@ -2,4 +2,6 @@ FAIRHUB_DATABASE_URL="postgresql://admin:root@localhost:5432/fairhub_local" FAIRHUB_SECRET="AddAny32+CharacterCountWordHereAsYourSecret" FAIRHUB_AZURE_READ_SAS_TOKEN= -FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME= \ No newline at end of file +FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME= + +FAIRHUB_GROWTHBOOK_CLIENT_KEY= \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3d0b6984..8ecc9bb4 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,6 +13,7 @@ jobs: # These are simulated secrets for test workflow only. 
FAIRHUB_DATABASE_URL: postgresql://admin:root@localhost:5432/fairhub_local FAIRHUB_SECRET: mXrkOHXXQoMAhCOTZOV93QlncmeTwEZFPxTP1TXGiOFabE0KmuZgHWvTOLgjbv3S + FAIRHUB_GROWTHBOOK_CLIENT_KEY: ${{ secrets.FAIRHUB_GROWTHBOOK_CLIENT_KEY }} steps: - uses: actions/checkout@v2 diff --git a/app.py b/app.py index ce1ee431..e3402b59 100644 --- a/app.py +++ b/app.py @@ -6,9 +6,10 @@ from datetime import timezone import jwt -from flask import Flask, request +from flask import Flask, g, request from flask_bcrypt import Bcrypt from flask_cors import CORS +from growthbook import GrowthBook from sqlalchemy import MetaData from waitress import serve @@ -123,12 +124,27 @@ def on_before_request(): # pylint: disable = inconsistent-return-statements try: authentication() + authorization() + + # create growthbook instance + g.gb = GrowthBook( + api_host="https://cdn.growthbook.io", + client_key=config.FAIRHUB_GROWTHBOOK_CLIENT_KEY, + ) + + # load feature flags + g.gb.load_features() + except UnauthenticatedException: return "Authentication is required", 401 @app.after_request def on_after_request(resp): + # destroy growthbook instance + if hasattr(g, "gb"): + g.gb.destroy() + public_routes = [ "/auth", "/docs", @@ -140,8 +156,7 @@ def on_after_request(resp): for route in public_routes: if request.path.startswith(route): return resp - # print("after request") - # print(request.cookies.get("token")) + if "token" not in request.cookies: return resp diff --git a/config.py b/config.py index 51a7e8c4..6517a3a2 100644 --- a/config.py +++ b/config.py @@ -22,3 +22,5 @@ def get_env(key): FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") + +FAIRHUB_GROWTHBOOK_CLIENT_KEY = get_env("FAIRHUB_GROWTHBOOK_CLIENT_KEY") diff --git a/poetry.lock b/poetry.lock index 59c6b735..eafaad74 100644 --- a/poetry.lock +++ b/poetry.lock @@ -406,7 +406,7 @@ files = [ name = "cffi" version = "1.15.1" description = 
"Foreign Function Interface for Python calling C code." -category = "dev" +category = "main" optional = false python-versions = "*" files = [ @@ -706,6 +706,52 @@ docopt = ">=0.6" minilog = ">=2.0" requests = ">=2.28,<3.0" +[[package]] +name = "cryptography" +version = "41.0.7" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = 
"sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +nox = ["nox"] +pep8test = ["black", "check-sdist", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "debugpy" version = "1.6.7" @@ -1062,6 +1108,7 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -1070,6 +1117,7 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = 
"greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -1099,6 +1147,7 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -1107,6 +1156,7 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -1123,6 +1173,23 @@ files = [ docs = ["Sphinx", "docutils (<0.18)"] test = ["objgraph", "psutil"] +[[package]] +name = "growthbook" +version = "1.0.0" +description = "Powerful Feature flagging and A/B testing for Python apps" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "growthbook-1.0.0-py2.py3-none-any.whl", hash = "sha256:919acf8b543bd0f7696626006d2bc2aeb818bfa7b63953e6fb6b597cd2b46a43"}, + {file = "growthbook-1.0.0.tar.gz", hash = "sha256:465b9dd370a3a6dbad75b12558646d6c51e7926f311b7ad74fb3dfc76e1eb4ca"}, +] + +[package.dependencies] +cryptography = "*" +typing-extensions = "*" +urllib3 = "*" + [[package]] name = "idna" version = "3.4" @@ -1839,6 +1906,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash 
= "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2448,7 +2525,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2777,6 +2854,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2784,8 +2862,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2802,6 +2887,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2809,6 +2895,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3444,7 
+3531,7 @@ files = [ name = "types-waitress" version = "2.1.4.9" description = "Typing stubs for waitress" -category = "main" +category = "dev" optional = false python-versions = "*" files = [ @@ -3760,4 +3847,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "f97f70aa5cac739e2d0e64da60bf877a54fc2571f2bf05bf39dad0dafa211c7d" +content-hash = "efd9b6ce34566eef2dd2f664134d65ebe2487959b5a689aab3a10cdc6d65abc9" diff --git a/pyproject.toml b/pyproject.toml index 1217a0ac..e98e63e3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,9 +13,7 @@ documentation = "https://pyfairdatatools.readthedocs.io" repository = "https://github.com/AI-READI/pyfairdatatools" - -keywords = [ -] +keywords = [] classifiers = [ # TODO: update this list to match your application: https://pypi.org/pypi?%3Aaction=list_classifiers "Development Status :: 1 - Planning", @@ -33,10 +31,24 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.8.16" +# Core Flask = "^2.3.2" Flask-Cors = "^4.0.0" flask-restx = "^1.1.0" +waitress = "^2.1.2" + +# API Client +requests = "^2.31.0" + +# Database flask-sqlalchemy = "^3.0.5" +alembic = "^1.12.1" +psycopg2 = "^2.9.6" + +# Auth +flask-bcrypt = "^1.0.1" +pyjwt = "^2.8.0" +email-validator = "^2.0.0.post2" minilog = "*" art = "^6.0" @@ -45,20 +57,19 @@ dicttoxml = "^1.7.16" types-requests = "^2.30.0.0" pyflakes = "^3.0.1" -requests = "^2.31.0" faker = "^18.11.2" -pyfairdatatools = "0.1.3" -psycopg2 = "^2.9.6" -python-dotenv = "^1.0.0" -flask-bcrypt = "^1.0.1" -pyjwt = "^2.8.0" -email-validator = "^2.0.0.post2" -alembic = "^1.12.1" -waitress = "^2.1.2" +# Feature flags +growthbook = "^1.0.0" + +# Fair Data Tools +pyfairdatatools = "0.1.3" [tool.poetry.group.dev.dependencies] +# Environment +python-dotenv = "^1.0.0" + # Formatters black = "^23.3.0" tomli = "^2.0.1" @@ -121,7 +132,7 @@ lint = ["flake8", "typecheck", "pylint"] precommit = ["format", 
"flake8", "typecheck", "pylint"] -test = "pytest -rx -W ignore::DeprecationWarning" +test = "pytest -rx -W ignore::DeprecationWarning" test_with_capture = "pytest -s -W ignore::DeprecationWarning" jupyter = "jupyter notebook" @@ -158,10 +169,7 @@ addopts = """ --no-cov-on-fail """ -filterwarnings = [ - "ignore", - "default:::flask_restx.*", -] +filterwarnings = ["ignore", "default:::flask_restx.*"] cache_dir = ".cache/pytest/" From 91832e6d6340614140498c3452348b03f48907c1 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 20 Dec 2023 11:05:55 -0800 Subject: [PATCH 387/505] =?UTF-8?q?=E2=9E=95=20chore:=20add=20dependency?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 2213 ++++++++++++++++++++++++------------------------ pyproject.toml | 3 + 2 files changed, 1117 insertions(+), 1099 deletions(-) diff --git a/poetry.lock b/poetry.lock index eafaad74..990d7c8f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,14 +2,14 @@ [[package]] name = "alembic" -version = "1.12.1" +version = "1.13.1" description = "A database migration tool for SQLAlchemy." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, - {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, + {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, + {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, ] [package.dependencies] @@ -20,7 +20,7 @@ SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" [package.extras] -tz = ["python-dateutil"] +tz = ["backports.zoneinfo"] [[package]] name = "aniso8601" @@ -39,25 +39,26 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "3.7.1" +version = "4.2.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", 
"sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] [[package]] name = "appnope" @@ -73,23 +74,24 @@ files = [ [[package]] name = "argon2-cffi" -version = "21.3.0" -description = "The secure Argon2 password hashing algorithm." +version = "23.1.0" +description = "Argon2 for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "argon2-cffi-21.3.0.tar.gz", hash = "sha256:d384164d944190a7dd7ef22c6aa3ff197da12962bd04b17f64d4e93d934dba5b"}, - {file = "argon2_cffi-21.3.0-py3-none-any.whl", hash = "sha256:8c976986f2c5c0e5000919e6de187906cfd81fb1c72bf9d88c01177e77da7f80"}, + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, ] [package.dependencies] argon2-cffi-bindings = "*" [package.extras] -dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] -docs = ["furo", "sphinx", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] [[package]] name = "argon2-cffi-bindings" @@ -131,29 +133,34 @@ tests = ["pytest"] 
[[package]] name = "arrow" -version = "1.2.3" +version = "1.3.0" description = "Better dates & times for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "arrow-1.2.3-py3-none-any.whl", hash = "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2"}, - {file = "arrow-1.2.3.tar.gz", hash = "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1"}, + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, ] [package.dependencies] python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] [[package]] name = "art" -version = "6.0" +version = "6.1" description = "ASCII Art Library For Python" category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "art-6.0-py3-none-any.whl", hash = "sha256:882e6b5a4c6045c6dace31147c0b8522c8ca70177a5922f172733d116123a187"}, - {file = "art-6.0.tar.gz", hash = "sha256:884ef1c10e900387cce97789e8668125720cbf4459206641b91fc298c859cda9"}, + {file = "art-6.1-py3-none-any.whl", hash = "sha256:159819c418001467f8d79616fa0814277deac97c8a363d1eb3e7c0a31526bfc3"}, + {file = "art-6.1.tar.gz", hash = "sha256:6ab3031e3b7710039e73497b0e750cadfe04d4c1279ce3a123500dbafb9e1b64"}, ] [package.extras] @@ -161,14 +168,14 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture [[package]] name = "astroid" -version = "2.15.6" +version = "2.15.8" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "astroid-2.15.6-py3-none-any.whl", hash = "sha256:389656ca57b6108f939cf5d2f9a2a825a3be50ba9d589670f393236e0a03b91c"}, - {file = "astroid-2.15.6.tar.gz", hash = "sha256:903f024859b7c7687d7a7f3a3f73b17301f8e42dfd9cc9df9d4418172d3e2dbd"}, + {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, + {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, ] [package.dependencies] @@ -181,21 +188,22 @@ wrapt = [ [[package]] name = "asttokens" -version = "2.2.1" +version = "2.4.1" description = "Annotate AST trees with source code positions" category = "dev" optional = false python-versions = "*" files = [ - {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] [package.dependencies] -six = "*" +six = ">=1.12.0" [package.extras] -test = ["astroid", "pytest"] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "async-lru" @@ -233,19 +241,22 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "babel" -version = "2.12.1" +version = "2.14.0" description = "Internationalization utilities" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, - {file = 
"Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "backcall" version = "0.2.0" @@ -260,33 +271,39 @@ files = [ [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.2" description = "Modern password hashing for your software and your servers" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = 
"bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = 
"bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = 
"bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, ] [package.extras] @@ -314,34 +331,34 @@ lxml = ["lxml"] [[package]] name = "black" -version = "23.7.0" +version = "23.12.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = 
"black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, + {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, + {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, + {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, + {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, + {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, + {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, + {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, + {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, + {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, + {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, + {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, + {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"}, + {file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"}, + {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"}, + {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"}, + {file = 
"black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"}, + {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"}, + {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"}, + {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"}, + {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, + {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, ] [package.dependencies] @@ -351,24 +368,24 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bleach" -version = "6.0.0" +version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "bleach-6.0.0-py3-none-any.whl", hash = "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4"}, - {file = "bleach-6.0.0.tar.gz", hash = "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414"}, + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, ] [package.dependencies] @@ -376,104 +393,92 @@ six = ">=1.9.0" webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.2)"] +css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "blinker" -version = "1.6.2" +version = "1.7.0" description = "Fast, simple object-to-object and broadcast signaling" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, - {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = 
"cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = 
"cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = 
"sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -481,99 +486,114 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", 
hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = 
"charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = 
"sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -593,92 +613,82 @@ files = [ [[package]] name = "comm" -version = "0.1.3" +version = "0.2.0" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "comm-0.1.3-py3-none-any.whl", hash = "sha256:16613c6211e20223f215fc6d3b266a247b6e2641bf4e0a3ad34cb1aff2aa3f37"}, - {file = "comm-0.1.3.tar.gz", hash = "sha256:a61efa9daffcfbe66fd643ba966f846a624e4e6d6767eda9cf6e993aadaab93e"}, + {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, + {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, ] [package.dependencies] -traitlets = ">=5.3" +traitlets = ">=4" [package.extras] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff (>=0.0.156)"] test = ["pytest"] -typing = ["mypy (>=0.990)"] [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.4" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = 
"coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = 
"coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aff2bd3d585969cc4486bfc69655e862028b689404563e6b549e6a8244f226df"}, + {file = "coverage-7.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4353923f38d752ecfbd3f1f20bf7a3546993ae5ecd7c07fd2f25d40b4e54571"}, + {file = "coverage-7.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea473c37872f0159294f7073f3fa72f68b03a129799f3533b2bb44d5e9fa4f82"}, + {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5214362abf26e254d749fc0c18af4c57b532a4bfde1a057565616dd3b8d7cc94"}, + {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f99b7d3f7a7adfa3d11e3a48d1a91bb65739555dd6a0d3fa68aa5852d962e5b1"}, + {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:74397a1263275bea9d736572d4cf338efaade2de9ff759f9c26bcdceb383bb49"}, + {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f154bd866318185ef5865ace5be3ac047b6d1cc0aeecf53bf83fe846f4384d5d"}, + {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e0d84099ea7cba9ff467f9c6f747e3fc3906e2aadac1ce7b41add72e8d0a3712"}, + {file = "coverage-7.3.4-cp310-cp310-win32.whl", hash = "sha256:3f477fb8a56e0c603587b8278d9dbd32e54bcc2922d62405f65574bd76eba78a"}, + {file = "coverage-7.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:c75738ce13d257efbb6633a049fb2ed8e87e2e6c2e906c52d1093a4d08d67c6b"}, + {file = "coverage-7.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:997aa14b3e014339d8101b9886063c5d06238848905d9ad6c6eabe533440a9a7"}, + {file = "coverage-7.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a9c5bc5db3eb4cd55ecb8397d8e9b70247904f8eca718cc53c12dcc98e59fc8"}, + {file = "coverage-7.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27ee94f088397d1feea3cb524e4313ff0410ead7d968029ecc4bc5a7e1d34fbf"}, + {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ce03e25e18dd9bf44723e83bc202114817f3367789052dc9e5b5c79f40cf59d"}, + {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85072e99474d894e5df582faec04abe137b28972d5e466999bc64fc37f564a03"}, + {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a877810ef918d0d345b783fc569608804f3ed2507bf32f14f652e4eaf5d8f8d0"}, + {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ac17b94ab4ca66cf803f2b22d47e392f0977f9da838bf71d1f0db6c32893cb9"}, + {file = 
"coverage-7.3.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:36d75ef2acab74dc948d0b537ef021306796da551e8ac8b467810911000af66a"}, + {file = "coverage-7.3.4-cp311-cp311-win32.whl", hash = "sha256:47ee56c2cd445ea35a8cc3ad5c8134cb9bece3a5cb50bb8265514208d0a65928"}, + {file = "coverage-7.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:11ab62d0ce5d9324915726f611f511a761efcca970bd49d876cf831b4de65be5"}, + {file = "coverage-7.3.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:33e63c578f4acce1b6cd292a66bc30164495010f1091d4b7529d014845cd9bee"}, + {file = "coverage-7.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:782693b817218169bfeb9b9ba7f4a9f242764e180ac9589b45112571f32a0ba6"}, + {file = "coverage-7.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4277ddaad9293454da19121c59f2d850f16bcb27f71f89a5c4836906eb35ef"}, + {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d892a19ae24b9801771a5a989fb3e850bd1ad2e2b6e83e949c65e8f37bc67a1"}, + {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3024ec1b3a221bd10b5d87337d0373c2bcaf7afd86d42081afe39b3e1820323b"}, + {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1c3e9d2bbd6f3f79cfecd6f20854f4dc0c6e0ec317df2b265266d0dc06535f1"}, + {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e91029d7f151d8bf5ab7d8bfe2c3dbefd239759d642b211a677bc0709c9fdb96"}, + {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6879fe41c60080aa4bb59703a526c54e0412b77e649a0d06a61782ecf0853ee1"}, + {file = "coverage-7.3.4-cp312-cp312-win32.whl", hash = "sha256:fd2f8a641f8f193968afdc8fd1697e602e199931012b574194052d132a79be13"}, + {file = "coverage-7.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:d1d0ce6c6947a3a4aa5479bebceff2c807b9f3b529b637e2b33dea4468d75fc7"}, + 
{file = "coverage-7.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36797b3625d1da885b369bdaaa3b0d9fb8865caed3c2b8230afaa6005434aa2f"}, + {file = "coverage-7.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfed0ec4b419fbc807dec417c401499ea869436910e1ca524cfb4f81cf3f60e7"}, + {file = "coverage-7.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97ff5a9fc2ca47f3383482858dd2cb8ddbf7514427eecf5aa5f7992d0571429"}, + {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:607b6c6b35aa49defaebf4526729bd5238bc36fe3ef1a417d9839e1d96ee1e4c"}, + {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8e258dcc335055ab59fe79f1dec217d9fb0cdace103d6b5c6df6b75915e7959"}, + {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a02ac7c51819702b384fea5ee033a7c202f732a2a2f1fe6c41e3d4019828c8d3"}, + {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b710869a15b8caf02e31d16487a931dbe78335462a122c8603bb9bd401ff6fb2"}, + {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6a23ae9348a7a92e7f750f9b7e828448e428e99c24616dec93a0720342f241d"}, + {file = "coverage-7.3.4-cp38-cp38-win32.whl", hash = "sha256:758ebaf74578b73f727acc4e8ab4b16ab6f22a5ffd7dd254e5946aba42a4ce76"}, + {file = "coverage-7.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:309ed6a559bc942b7cc721f2976326efbfe81fc2b8f601c722bff927328507dc"}, + {file = "coverage-7.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aefbb29dc56317a4fcb2f3857d5bce9b881038ed7e5aa5d3bcab25bd23f57328"}, + {file = "coverage-7.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:183c16173a70caf92e2dfcfe7c7a576de6fa9edc4119b8e13f91db7ca33a7923"}, + {file = "coverage-7.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4a4184dcbe4f98d86470273e758f1d24191ca095412e4335ff27b417291f5964"}, + {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93698ac0995516ccdca55342599a1463ed2e2d8942316da31686d4d614597ef9"}, + {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb220b3596358a86361139edce40d97da7458412d412e1e10c8e1970ee8c09ab"}, + {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5b14abde6f8d969e6b9dd8c7a013d9a2b52af1235fe7bebef25ad5c8f47fa18"}, + {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:610afaf929dc0e09a5eef6981edb6a57a46b7eceff151947b836d869d6d567c1"}, + {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed790728fb71e6b8247bd28e77e99d0c276dff952389b5388169b8ca7b1c28"}, + {file = "coverage-7.3.4-cp39-cp39-win32.whl", hash = "sha256:c15fdfb141fcf6a900e68bfa35689e1256a670db32b96e7a931cab4a0e1600e5"}, + {file = "coverage-7.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:38d0b307c4d99a7aca4e00cad4311b7c51b7ac38fb7dea2abe0d182dd4008e05"}, + {file = "coverage-7.3.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b1e0f25ae99cf247abfb3f0fac7ae25739e4cd96bf1afa3537827c576b4847e5"}, + {file = "coverage-7.3.4.tar.gz", hash = "sha256:020d56d2da5bc22a0e00a5b0d54597ee91ad72446fa4cf1b97c35022f6b6dbf0"}, ] [package.dependencies] @@ -754,30 +764,30 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "debugpy" -version = "1.6.7" +version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"}, - {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"}, - {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = "sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"}, - {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"}, - {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"}, - {file = "debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"}, - {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"}, - {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"}, - {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"}, - {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"}, - {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"}, - {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"}, - {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"}, - {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"}, - {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"}, - {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", 
hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"}, - {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = "sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"}, - {file = "debugpy-1.6.7.zip", hash = "sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"}, + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, ] [[package]] @@ -864,14 +874,14 @@ files = [ [[package]] name = "email-validator" -version = "2.0.0.post2" +version = "2.1.0.post1" description = "A robust email address syntax and deliverability validation library." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "email_validator-2.0.0.post2-py3-none-any.whl", hash = "sha256:2466ba57cda361fb7309fd3d5a225723c788ca4bbad32a0ebd5373b99730285c"}, - {file = "email_validator-2.0.0.post2.tar.gz", hash = "sha256:1ff6e86044200c56ae23595695c54e9614f4a9551e0e393614f764860b3d7900"}, + {file = "email_validator-2.1.0.post1-py3-none-any.whl", hash = "sha256:c973053efbeddfef924dc0bd93f6e77a1ea7ee0fce935aea7103c7a3d6d2d637"}, + {file = "email_validator-2.1.0.post1.tar.gz", hash = "sha256:a4b0bd1cf55f073b924258d19321b1f3aa74b4b5a71a42c305575dba920e1a44"}, ] [package.dependencies] @@ -880,14 +890,14 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -895,18 +905,18 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "1.2.0" +version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, - {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, + {file = 
"executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, ] [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "faker" @@ -925,14 +935,14 @@ python-dateutil = ">=2.4" [[package]] name = "fastjsonschema" -version = "2.18.0" +version = "2.19.0" description = "Fastest Python implementation of JSON schema" category = "dev" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.18.0-py3-none-any.whl", hash = "sha256:128039912a11a807068a7c87d0da36660afbfd7202780db26c4aa7153cfdc799"}, - {file = "fastjsonschema-2.18.0.tar.gz", hash = "sha256:e820349dd16f806e4bd1467a138dced9def4bc7d6213a34295272a6cac95b5bd"}, + {file = "fastjsonschema-2.19.0-py3-none-any.whl", hash = "sha256:b9fd1a2dd6971dbc7fee280a95bd199ae0dd9ce22beb91cc75e9c1c528a5170e"}, + {file = "fastjsonschema-2.19.0.tar.gz", hash = "sha256:e25df6647e1bc4a26070b700897b07b542ec898dd4f1f6ea013e7f6a88417225"}, ] [package.extras] @@ -957,14 +967,14 @@ pyflakes = ">=3.1.0,<3.2.0" [[package]] name = "flask" -version = "2.3.2" +version = "2.3.3" description = "A simple framework for building complex web applications." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Flask-2.3.2-py3-none-any.whl", hash = "sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0"}, - {file = "Flask-2.3.2.tar.gz", hash = "sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef"}, + {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, + {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, ] [package.dependencies] @@ -973,7 +983,7 @@ click = ">=8.1.3" importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" -Werkzeug = ">=2.3.3" +Werkzeug = ">=2.3.7" [package.extras] async = ["asgiref (>=3.2)"] @@ -1010,45 +1020,61 @@ files = [ [package.dependencies] Flask = ">=0.9" +[[package]] +name = "flask-mail" +version = "0.9.1" +description = "Flask extension for sending email" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, +] + +[package.dependencies] +blinker = "*" +Flask = "*" + [[package]] name = "flask-restx" -version = "1.1.0" +version = "1.3.0" description = "Fully featured framework for fast, easy and documented API development with Flask" category = "main" optional = false python-versions = "*" files = [ - {file = "flask-restx-1.1.0.tar.gz", hash = "sha256:62b6b6c9de65e5960cf4f8b35e1bd3eca6998838a01b2f71e2a9d4c14a4ccd14"}, - {file = "flask_restx-1.1.0-py2.py3-none-any.whl", hash = "sha256:eaff65f6edd400ee00b40475496d61a4937b28371dfed039d3fd180d206fee4e"}, + {file = "flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728"}, + {file = "flask_restx-1.3.0-py2.py3-none-any.whl", hash = 
"sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691"}, ] [package.dependencies] aniso8601 = ">=0.82" Flask = ">=0.8,<2.0.0 || >2.0.0" +importlib-resources = "*" jsonschema = "*" pytz = "*" werkzeug = "!=2.0.0" [package.extras] -dev = ["Faker (==2.0.0)", "black", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.2.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "tox", "twine (==3.8.0)", "tzlocal"] +dev = ["Faker (==2.0.0)", "black", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "tox", "twine (==3.8.0)", "tzlocal"] doc = ["Sphinx (==5.3.0)", "alabaster (==0.7.12)", "sphinx-issues (==3.0.1)"] -test = ["Faker (==2.0.0)", "blinker", "invoke (==2.0.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.2.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "twine (==3.8.0)", "tzlocal"] +test = ["Faker (==2.0.0)", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "twine (==3.8.0)", "tzlocal"] [[package]] name = "flask-sqlalchemy" -version = "3.0.5" +version = "3.1.1" description = "Add SQLAlchemy support to your Flask application." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "flask_sqlalchemy-3.0.5-py3-none-any.whl", hash = "sha256:cabb6600ddd819a9f859f36515bb1bd8e7dbf30206cc679d2b081dff9e383283"}, - {file = "flask_sqlalchemy-3.0.5.tar.gz", hash = "sha256:c5765e58ca145401b52106c0f46178569243c5da25556be2c231ecc60867c5b1"}, + {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, + {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, ] [package.dependencies] flask = ">=2.2.5" -sqlalchemy = ">=1.4.18" +sqlalchemy = ">=2.0.16" [[package]] name = "fqdn" @@ -1064,14 +1090,14 @@ files = [ [[package]] name = "freezegun" -version = "1.2.2" +version = "1.4.0" description = "Let your Python tests travel through time" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, - {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, ] [package.dependencies] @@ -1097,80 +1123,74 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "greenlet" -version = "2.0.2" +version = "3.0.2" description = "Lightweight in-process concurrent programming" category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -files = [ - {file = "greenlet-2.0.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bdfea8c661e80d3c1c99ad7c3ff74e6e87184895bbaca6ee8cc61209f8b9b85d"}, - {file = 
"greenlet-2.0.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9d14b83fab60d5e8abe587d51c75b252bcc21683f24699ada8fb275d7712f5a9"}, - {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, - {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, - {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, - {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, - {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d75209eed723105f9596807495d58d10b3470fa6732dd6756595e89925ce2470"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3a51c9751078733d88e013587b108f1b7a1fb106d402fb390740f002b6f6551a"}, - {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, - {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, - {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, - {file = 
"greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, - {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, - {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, - {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, - {file = "greenlet-2.0.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:910841381caba4f744a44bf81bfd573c94e10b3045ee00de0cbf436fe50673a6"}, - {file = "greenlet-2.0.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:18a7f18b82b52ee85322d7a7874e676f34ab319b9f8cce5de06067384aa8ff43"}, - {file = "greenlet-2.0.2-cp35-cp35m-win32.whl", hash = "sha256:03a8f4f3430c3b3ff8d10a2a86028c660355ab637cee9333d63d66b56f09d52a"}, - {file = "greenlet-2.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4b58adb399c4d61d912c4c331984d60eb66565175cdf4a34792cd9600f21b394"}, - {file = "greenlet-2.0.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:703f18f3fda276b9a916f0934d2fb6d989bf0b4fb5a64825260eb9bfd52d78f0"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:32e5b64b148966d9cccc2c8d35a671409e45f195864560829f395a54226408d3"}, - {file = 
"greenlet-2.0.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dd11f291565a81d71dab10b7033395b7a3a5456e637cf997a6f33ebdf06f8db"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0f72c9ddb8cd28532185f54cc1453f2c16fb417a08b53a855c4e6a418edd099"}, - {file = "greenlet-2.0.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd021c754b162c0fb55ad5d6b9d960db667faad0fa2ff25bb6e1301b0b6e6a75"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:3c9b12575734155d0c09d6c3e10dbd81665d5c18e1a7c6597df72fd05990c8cf"}, - {file = "greenlet-2.0.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b9ec052b06a0524f0e35bd8790686a1da006bd911dd1ef7d50b77bfbad74e292"}, - {file = "greenlet-2.0.2-cp36-cp36m-win32.whl", hash = "sha256:dbfcfc0218093a19c252ca8eb9aee3d29cfdcb586df21049b9d777fd32c14fd9"}, - {file = "greenlet-2.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:9f35ec95538f50292f6d8f2c9c9f8a3c6540bbfec21c9e5b4b751e0a7c20864f"}, - {file = "greenlet-2.0.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:d5508f0b173e6aa47273bdc0a0b5ba055b59662ba7c7ee5119528f466585526b"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f82d4d717d8ef19188687aa32b8363e96062911e63ba22a0cff7802a8e58e5f1"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c59a2120b55788e800d82dfa99b9e156ff8f2227f07c5e3012a45a399620b7"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2780572ec463d44c1d3ae850239508dbeb9fed38e294c68d19a24d925d9223ca"}, - {file = "greenlet-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937e9020b514ceedb9c830c55d5c9872abc90f4b5862f89c0887033ae33c6f73"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:36abbf031e1c0f79dd5d596bfaf8e921c41df2bdf54ee1eed921ce1f52999a86"}, - {file = "greenlet-2.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:18e98fb3de7dba1c0a852731c3070cf022d14f0d68b4c87a19cc1016f3bb8b33"}, - {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, - {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, - {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, - {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2162a36d3de67ee896c43effcd5ee3de247eb00354db411feb025aa319857"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0bf60faf0bc2468089bdc5edd10555bab6e85152191df713e2ab1fcc86382b5a"}, - {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, - {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, - {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, - {file 
= "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, - {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be4ed120b52ae4d974aa40215fcdfde9194d63541c7ded40ee12eb4dda57b76b"}, - {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c817e84245513926588caf1152e3b559ff794d505555211ca041f032abbb6b"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1a819eef4b0e0b96bb0d98d797bef17dc1b4a10e8d7446be32d1da33e095dbb8"}, - {file = "greenlet-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7efde645ca1cc441d6dc4b48c0f7101e8d86b54c8530141b09fd31cef5149ec9"}, - {file = "greenlet-2.0.2-cp39-cp39-win32.whl", hash = "sha256:ea9872c80c132f4663822dd2a08d404073a5a9b5ba6155bea72fb2a79d1093b5"}, - {file = "greenlet-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:db1a39669102a1d8d12b57de2bb7e2ec9066a6f2b3da35ae511ff93b01b5d564"}, - {file = "greenlet-2.0.2.tar.gz", hash = "sha256:e7c8dc13af7db097bed64a051d2dd49e9f0af495c26995c00a9ee842690d34c0"}, -] - -[package.extras] -docs = ["Sphinx", "docutils (<0.18)"] +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9acd8fd67c248b8537953cb3af8787c18a87c33d4dcf6830e410ee1f95a63fd4"}, + {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:339c0272a62fac7e602e4e6ec32a64ff9abadc638b72f17f6713556ed011d493"}, + {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38878744926cec29b5cc3654ef47f3003f14bfbba7230e3c8492393fe29cc28b"}, + {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b3f0497db77cfd034f829678b28267eeeeaf2fc21b3f5041600f7617139e6773"}, + {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1a8a08de7f68506a38f9a2ddb26bbd1480689e66d788fcd4b5f77e2d9ecfcc"}, + {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:89a6f6ddcbef4000cda7e205c4c20d319488ff03db961d72d4e73519d2465309"}, + {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c1f647fe5b94b51488b314c82fdda10a8756d650cee8d3cd29f657c6031bdf73"}, + {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9560c580c896030ff9c311c603aaf2282234643c90d1dec738a1d93e3e53cd51"}, + {file = "greenlet-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2e9c5423046eec21f6651268cb674dfba97280701e04ef23d312776377313206"}, + {file = "greenlet-3.0.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1fd25dfc5879a82103b3d9e43fa952e3026c221996ff4d32a9c72052544835d"}, + {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfdc950dd25f25d6582952e58521bca749cf3eeb7a9bad69237024308c8196"}, + {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edf7a1daba1f7c54326291a8cde58da86ab115b78c91d502be8744f0aa8e3ffa"}, + {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4cf532bf3c58a862196b06947b1b5cc55503884f9b63bf18582a75228d9950e"}, + {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e79fb5a9fb2d0bd3b6573784f5e5adabc0b0566ad3180a028af99523ce8f6138"}, + {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:006c1028ac0cfcc4e772980cfe73f5476041c8c91d15d64f52482fc571149d46"}, + {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fefd5eb2c0b1adffdf2802ff7df45bfe65988b15f6b972706a0e55d451bffaea"}, + {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c0fdb8142742ee68e97c106eb81e7d3e883cc739d9c5f2b28bc38a7bafeb6d1"}, + {file = "greenlet-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:8f8d14a0a4e8c670fbce633d8b9a1ee175673a695475acd838e372966845f764"}, + {file = "greenlet-3.0.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:654b84c9527182036747938b81938f1d03fb8321377510bc1854a9370418ab66"}, + {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bc4fde0842ff2b9cf33382ad0b4db91c2582db836793d58d174c569637144"}, + {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27b142a9080bdd5869a2fa7ebf407b3c0b24bd812db925de90e9afe3c417fd6"}, + {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0df7eed98ea23b20e9db64d46eb05671ba33147df9405330695bcd81a73bb0c9"}, + {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5d60805057d8948065338be6320d35e26b0a72f45db392eb32b70dd6dc9227"}, + {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e0e28f5233d64c693382f66d47c362b72089ebf8ac77df7e12ac705c9fa1163d"}, + {file = "greenlet-3.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e4bfa752b3688d74ab1186e2159779ff4867644d2b1ebf16db14281f0445377"}, + {file = "greenlet-3.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c42bb589e6e9f9d8bdd79f02f044dff020d30c1afa6e84c0b56d1ce8a324553c"}, + {file = 
"greenlet-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:b2cedf279ca38ef3f4ed0d013a6a84a7fc3d9495a716b84a5fc5ff448965f251"}, + {file = "greenlet-3.0.2-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:6d65bec56a7bc352bcf11b275b838df618651109074d455a772d3afe25390b7d"}, + {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0acadbc3f72cb0ee85070e8d36bd2a4673d2abd10731ee73c10222cf2dd4713c"}, + {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14b5d999aefe9ffd2049ad19079f733c3aaa426190ffecadb1d5feacef8fe397"}, + {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f27aa32466993c92d326df982c4acccd9530fe354e938d9e9deada563e71ce76"}, + {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f34a765c5170c0673eb747213a0275ecc749ab3652bdbec324621ed5b2edaef"}, + {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:520fcb53a39ef90f5021c77606952dbbc1da75d77114d69b8d7bded4a8e1a813"}, + {file = "greenlet-3.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1fceb5351ab1601903e714c3028b37f6ea722be6873f46e349a960156c05650"}, + {file = "greenlet-3.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7363756cc439a503505b67983237d1cc19139b66488263eb19f5719a32597836"}, + {file = "greenlet-3.0.2-cp37-cp37m-win32.whl", hash = "sha256:d5547b462b8099b84746461e882a3eb8a6e3f80be46cb6afb8524eeb191d1a30"}, + {file = "greenlet-3.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:950e21562818f9c771989b5b65f990e76f4ac27af66e1bb34634ae67886ede2a"}, + {file = "greenlet-3.0.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d64643317e76b4b41fdba659e7eca29634e5739b8bc394eda3a9127f697ed4b0"}, + {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5f9ea7c2c9795549653b6f7569f6bc75d2c7d1f6b2854eb8ce0bc6ec3cb2dd88"}, + {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db4233358d3438369051a2f290f1311a360d25c49f255a6c5d10b5bcb3aa2b49"}, + {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bf77b41798e8417657245b9f3649314218a4a17aefb02bb3992862df32495"}, + {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d0df07a38e41a10dfb62c6fc75ede196572b580f48ee49b9282c65639f3965"}, + {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10d247260db20887ae8857c0cbc750b9170f0b067dd7d38fb68a3f2334393bd3"}, + {file = "greenlet-3.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a37ae53cca36823597fd5f65341b6f7bac2dd69ecd6ca01334bb795460ab150b"}, + {file = "greenlet-3.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:80d068e4b6e2499847d916ef64176811ead6bf210a610859220d537d935ec6fd"}, + {file = "greenlet-3.0.2-cp38-cp38-win32.whl", hash = "sha256:b1405614692ac986490d10d3e1a05e9734f473750d4bee3cf7d1286ef7af7da6"}, + {file = "greenlet-3.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8756a94ed8f293450b0e91119eca2a36332deba69feb2f9ca410d35e74eae1e4"}, + {file = "greenlet-3.0.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2c93cd03acb1499ee4de675e1a4ed8eaaa7227f7949dc55b37182047b006a7aa"}, + {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dac09e3c0b78265d2e6d3cbac2d7c48bd1aa4b04a8ffeda3adde9f1688df2c3"}, + {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ee59c4627c8c4bb3e15949fbcd499abd6b7f4ad9e0bfcb62c65c5e2cabe0ec4"}, + {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18fe39d70d482b22f0014e84947c5aaa7211fb8e13dc4cc1c43ed2aa1db06d9a"}, + {file = 
"greenlet-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84bef3cfb6b6bfe258c98c519811c240dbc5b33a523a14933a252e486797c90"}, + {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aecea0442975741e7d69daff9b13c83caff8c13eeb17485afa65f6360a045765"}, + {file = "greenlet-3.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f260e6c2337871a52161824058923df2bbddb38bc11a5cbe71f3474d877c5bd9"}, + {file = "greenlet-3.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fc14dd9554f88c9c1fe04771589ae24db76cd56c8f1104e4381b383d6b71aff8"}, + {file = "greenlet-3.0.2-cp39-cp39-win32.whl", hash = "sha256:bfcecc984d60b20ffe30173b03bfe9ba6cb671b0be1e95c3e2056d4fe7006590"}, + {file = "greenlet-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:c235131bf59d2546bb3ebaa8d436126267392f2e51b85ff45ac60f3a26549af0"}, + {file = "greenlet-3.0.2.tar.gz", hash = "sha256:1c1129bc47266d83444c85a8e990ae22688cf05fb20d7951fd2866007c2ba9bc"}, +] + +[package.extras] +docs = ["Sphinx"] test = ["objgraph", "psutil"] [[package]] @@ -1192,54 +1212,54 @@ urllib3 = "*" [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "7.0.0" description = "Read metadata from Python packages" category = "main" optional = false python-versions = ">=3.8" files = [ - 
{file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, + {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" -version = "6.0.0" +version = "6.1.1" description = "Read resources from Python packages" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"}, - {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"}, + {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, + {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", 
"jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -1255,14 +1275,14 @@ files = [ [[package]] name = "ipykernel" -version = "6.25.0" +version = "6.27.1" description = "IPython Kernel for Jupyter" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.25.0-py3-none-any.whl", hash = "sha256:f0042e867ac3f6bca1679e6a88cbd6a58ed93a44f9d0866aecde6efe8de76659"}, - {file = "ipykernel-6.25.0.tar.gz", hash = "sha256:e342ce84712861be4b248c4a73472be4702c1b0dd77448bfd6bcfb3af9d5ddf9"}, + {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, + {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, ] [package.dependencies] @@ -1289,14 +1309,14 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.12.2" +version = "8.12.3" description = "IPython: Productive Interactive Computing" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "ipython-8.12.2-py3-none-any.whl", hash = "sha256:ea8801f15dfe4ffb76dea1b09b847430ffd70d827b41735c64a0638a04103bfc"}, - {file = "ipython-8.12.2.tar.gz", hash = "sha256:c7b80eb7f5a855a88efc971fda506ff7a91c280b42cdae26643e0f601ea281ea"}, + {file = "ipython-8.12.3-py3-none-any.whl", hash = 
"sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, + {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, ] [package.dependencies] @@ -1327,36 +1347,24 @@ qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] -[[package]] -name = "ipython-genutils" -version = "0.2.0" -description = "Vestigial utilities from IPython" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] - [[package]] name = "ipywidgets" -version = "8.1.0" +version = "8.1.1" description = "Jupyter interactive widgets" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "ipywidgets-8.1.0-py3-none-any.whl", hash = "sha256:6c8396cc7b8c95dfb4e9ab0054f48c002f045e7e5d7ae523f559d64e525a98ab"}, - {file = "ipywidgets-8.1.0.tar.gz", hash = "sha256:ce97dd90525b3066fd00094690964e7eac14cf9b7745d35565b5eeac20cce687"}, + {file = "ipywidgets-8.1.1-py3-none-any.whl", hash = "sha256:2b88d728656aea3bbfd05d32c747cfd0078f9d7e159cf982433b58ad717eed7f"}, + {file = "ipywidgets-8.1.1.tar.gz", hash = "sha256:40211efb556adec6fa450ccc2a77d59ca44a060f4f9f136833df59c9f538e6e8"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.7,<3.1.0" +jupyterlab-widgets = ">=3.0.9,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.7,<4.1.0" +widgetsnbextension = ">=4.0.9,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] @@ -1378,21 +1386,18 @@ arrow = ">=0.15.0" [[package]] name = 
"isort" -version = "5.12.0" +version = "5.13.2" description = "A Python utility / library to sort Python imports." category = "dev" optional = false python-versions = ">=3.8.0" files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, ] [package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] +colors = ["colorama (>=0.4.6)"] [[package]] name = "itsdangerous" @@ -1408,14 +1413,14 @@ files = [ [[package]] name = "jedi" -version = "0.19.0" +version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." 
category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.19.0-py2.py3-none-any.whl", hash = "sha256:cb8ce23fbccff0025e9386b5cf85e892f94c9b822378f8da49970471335ac64e"}, - {file = "jedi-0.19.0.tar.gz", hash = "sha256:bcf9894f1753969cbac8022a8c2eaee06bfa3724e4192470aaffe7eb6272b0c4"}, + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, ] [package.dependencies] @@ -1424,7 +1429,7 @@ parso = ">=0.8.3,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" @@ -1473,14 +1478,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.19.1" +version = "4.20.0" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = 
"sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, + {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, + {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, ] [package.dependencies] @@ -1505,19 +1510,19 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.7.1" +version = "2023.11.2" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, + {file = "jsonschema_specifications-2023.11.2-py3-none-any.whl", hash = "sha256:e74ba7c0a65e8cb49dc26837d6cfe576557084a8b423ed16a420984228104f93"}, + {file = "jsonschema_specifications-2023.11.2.tar.gz", hash = "sha256:9472fc4fea474cd74bea4a2b190daeccb5a9e4db2ea80efcf7a1b582fc9a81b8"}, ] [package.dependencies] importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} -referencing = ">=0.28.0" +referencing = ">=0.31.0" [[package]] name = "jupyter" @@ -1542,14 +1547,14 @@ qtconsole = "*" [[package]] name = "jupyter-client" -version = "8.3.0" +version = "8.6.0" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, - {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, + {file = 
"jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, + {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, ] [package.dependencies] @@ -1591,14 +1596,14 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.3.1" +version = "5.5.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"}, - {file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"}, + {file = "jupyter_core-5.5.1-py3-none-any.whl", hash = "sha256:220dfb00c45f0d780ce132bb7976b58263f81a3ada6e90a9b6823785a424f739"}, + {file = "jupyter_core-5.5.1.tar.gz", hash = "sha256:1553311a97ccd12936037f36b9ab4d6ae8ceea6ad2d5c90d94a909e752178e40"}, ] [package.dependencies] @@ -1607,19 +1612,19 @@ pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_ traitlets = ">=5.3" [package.extras] -docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.7.0" +version = "0.9.0" description = "Jupyter Event System library" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_events-0.7.0-py3-none-any.whl", hash = "sha256:4753da434c13a37c3f3c89b500afa0c0a6241633441421f6adafe2fb2e2b924e"}, - {file = "jupyter_events-0.7.0.tar.gz", hash = 
"sha256:7be27f54b8388c03eefea123a4f79247c5b9381c49fb1cd48615ee191eb12615"}, + {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, + {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, ] [package.dependencies] @@ -1638,14 +1643,14 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.0" +version = "2.2.1" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.0.tar.gz", hash = "sha256:8ebbcb533adb41e5d635eb8fe82956b0aafbf0fd443b6c4bfa906edeeb8635a1"}, - {file = "jupyter_lsp-2.2.0-py3-none-any.whl", hash = "sha256:9e06b8b4f7dd50300b70dd1a78c0c3b0c3d8fa68e0f2d8a5d1fbab62072aca3f"}, + {file = "jupyter-lsp-2.2.1.tar.gz", hash = "sha256:b17fab6d70fe83c8896b0cff59237640038247c196056b43684a0902b6a9e0fb"}, + {file = "jupyter_lsp-2.2.1-py3-none-any.whl", hash = "sha256:17a689910c5e4ae5e7d334b02f31d08ffbe98108f6f658fb05e4304b4345368b"}, ] [package.dependencies] @@ -1654,14 +1659,14 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.7.0" +version = "2.12.1" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.7.0-py3-none-any.whl", hash = "sha256:6a77912aff643e53fa14bdb2634884b52b784a4be77ce8e93f7283faed0f0849"}, - {file = "jupyter_server-2.7.0.tar.gz", hash = "sha256:36da0a266d31a41ac335a366c88933c17dfa5bb817a48f5c02c16d303bc9477f"}, + {file = "jupyter_server-2.12.1-py3-none-any.whl", hash = "sha256:fd030dd7be1ca572e4598203f718df6630c12bd28a599d7f1791c4d7938e1010"}, + {file = "jupyter_server-2.12.1.tar.gz", hash = "sha256:dc77b7dcc5fc0547acba2b2844f01798008667201eea27c6319ff9257d700a6d"}, ] [package.dependencies] @@ -1670,7 +1675,7 @@ argon2-cffi = "*" jinja2 = "*" jupyter-client = ">=7.4.4" jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -jupyter-events = ">=0.6.0" +jupyter-events = ">=0.9.0" jupyter-server-terminals = "*" nbconvert = ">=6.4.4" nbformat = ">=5.3.0" @@ -1679,7 +1684,7 @@ packaging = "*" prometheus-client = "*" pywinpty = {version = "*", markers = "os_name == \"nt\""} pyzmq = ">=24" -send2trash = "*" +send2trash = ">=1.8.2" terminado = ">=0.8.3" tornado = ">=6.2.0" traitlets = ">=5.6.0" @@ -1691,14 +1696,14 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc [[package]] name = "jupyter-server-terminals" -version = "0.4.4" +version = "0.5.0" description = "A Jupyter Server Extension Providing Terminals." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.4.4-py3-none-any.whl", hash = "sha256:75779164661cec02a8758a5311e18bb8eb70c4e86c6b699403100f1585a12a36"}, - {file = "jupyter_server_terminals-0.4.4.tar.gz", hash = "sha256:57ab779797c25a7ba68e97bcfb5d7740f2b5e8a83b5e8102b10438041a7eac5d"}, + {file = "jupyter_server_terminals-0.5.0-py3-none-any.whl", hash = "sha256:2fc0692c883bfd891f4fba0c4b4a684a37234b0ba472f2e97ed0a3888f46e1e4"}, + {file = "jupyter_server_terminals-0.5.0.tar.gz", hash = "sha256:ebcd68c9afbf98a480a533e6f3266354336e645536953b7abcc7bdeebc0154a3"}, ] [package.dependencies] @@ -1706,19 +1711,19 @@ pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} terminado = ">=0.8.3" [package.extras] -docs = ["jinja2", "jupyter-server", "mistune (<3.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] -test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] [[package]] name = "jupyterlab" -version = "4.0.3" +version = "4.0.9" description = "JupyterLab computational environment" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.3-py3-none-any.whl", hash = "sha256:d369944391b1d15f2d1f3cb965fb67352956279b2ae6f03ce7947a43940a8301"}, - {file = "jupyterlab-4.0.3.tar.gz", hash = "sha256:e14d1ce46a613028111d0d476a1d7d6b094003b7462bac669f5b478317abcb39"}, + {file = "jupyterlab-4.0.9-py3-none-any.whl", hash = 
"sha256:9f6f8e36d543fdbcc3df961a1d6a3f524b4a4001be0327a398f68fa4e534107c"}, + {file = "jupyterlab-4.0.9.tar.gz", hash = "sha256:9ebada41d52651f623c0c9f069ddb8a21d6848e4c887d8e5ddc0613166ed5c0b"}, ] [package.dependencies] @@ -1738,33 +1743,33 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.3.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.271)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8)", "sphinx-copybutton"] +dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.1.4)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] [[package]] name = "jupyterlab-pygments" -version = "0.2.2" +version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, - {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = 
"sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, ] [[package]] name = "jupyterlab-server" -version = "2.24.0" +version = "2.25.2" description = "A set of server components for JupyterLab and JupyterLab like applications." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jupyterlab_server-2.24.0-py3-none-any.whl", hash = "sha256:5f077e142bb8dc9b843d960f940c513581bceca3793a0d80f9c67d9522c4e876"}, - {file = "jupyterlab_server-2.24.0.tar.gz", hash = "sha256:4e6f99e0a5579bbbc32e449c4dbb039561d4f1a7827d5733273ed56738f21f07"}, + {file = "jupyterlab_server-2.25.2-py3-none-any.whl", hash = "sha256:5b1798c9cc6a44f65c757de9f97fc06fc3d42535afbf47d2ace5e964ab447aaf"}, + {file = "jupyterlab_server-2.25.2.tar.gz", hash = "sha256:bd0ec7a99ebcedc8bcff939ef86e52c378e44c2707e053fcd81d046ce979ee63"}, ] [package.dependencies] @@ -1772,72 +1777,73 @@ babel = ">=2.10" importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jinja2 = ">=3.0.3" json5 = ">=0.9.0" -jsonschema = ">=4.17.3" +jsonschema = ">=4.18.0" jupyter-server = ">=1.21,<3" packaging = ">=21.3" -requests = ">=2.28" +requests = ">=2.31" [package.extras] docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] -openapi = ["openapi-core (>=0.16.1,<0.17.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "jupyterlab-server[openapi]", "openapi-spec-validator (>=0.5.1,<0.7.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] +openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", 
"openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] [[package]] name = "jupyterlab-widgets" -version = "3.0.8" +version = "3.0.9" description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.8-py3-none-any.whl", hash = "sha256:4715912d6ceab839c9db35953c764b3214ebbc9161c809f6e0510168845dfdf5"}, - {file = "jupyterlab_widgets-3.0.8.tar.gz", hash = "sha256:d428ab97b8d87cc7c54cbf37644d6e0f0e662f23876e05fa460a73ec3257252a"}, + {file = "jupyterlab_widgets-3.0.9-py3-none-any.whl", hash = "sha256:3cf5bdf5b897bf3bccf1c11873aa4afd776d7430200f765e0686bd352487b58d"}, + {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, ] [[package]] name = "lazy-object-proxy" -version = "1.9.0" +version = "1.10.0" description = "A fast and thorough lazy object proxy." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, - {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, - {file = 
"lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, - {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, - {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, - {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, - {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, + {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, + {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, + {file = 
"lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, + {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, + {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, + {file = 
"lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, + {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, + {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash = "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, + {file 
= "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, + {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, ] [[package]] @@ -1906,16 +1912,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1989,26 +1985,26 @@ files = [ [[package]] name = "minilog" -version = "2.2" +version = "2.3" description = "Minimalistic wrapper for Python logging." category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "minilog-2.2-py3-none-any.whl", hash = "sha256:8eda4bf1ee6bcb302a09c36c8e0a58fdd24f41f381c8ca254ddac0f814759e76"}, - {file = "minilog-2.2.tar.gz", hash = "sha256:b2bae5327bf62b82b700bf58e17e5236ee77036dd8de6d44e6c5cff59d0a97ac"}, + {file = "minilog-2.3-py3-none-any.whl", hash = "sha256:e42dc2def1da424e90d6664279c128dde94adc5840557b27857957ed23ee09b6"}, + {file = "minilog-2.3.tar.gz", hash = "sha256:ebdf354f1dd86a2e8a824cdde4b7b50cdbc24b99a5465bc4d1806bd1e030bc92"}, ] [[package]] name = "mistune" -version = "3.0.1" +version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mistune-3.0.1-py3-none-any.whl", hash = "sha256:b9b3e438efbb57c62b5beb5e134dab664800bdf1284a7ee09e8b12b13eb1aac6"}, - {file = "mistune-3.0.1.tar.gz", hash = 
"sha256:e912116c13aa0944f9dc530db38eb88f6a77087ab128f49f84a48f4c05ea163c"}, + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, ] [[package]] @@ -2040,38 +2036,39 @@ i18n = ["babel (>=2.9.0)"] [[package]] name = "mypy" -version = "1.4.1" +version = "1.7.1" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = 
"mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, + {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, + {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, + {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, + {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, + {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, + {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, + {file = 
"mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, + {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, + {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, + {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, + {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, + {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, + {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, + {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, + {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, + {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, + {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, + {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, ] [package.dependencies] @@ -2082,7 +2079,7 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -2099,14 +2096,14 @@ files = [ [[package]] name = "nbclient" -version = "0.8.0" +version = "0.9.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "dev" optional = false python-versions = ">=3.8.0" files = [ - {file = "nbclient-0.8.0-py3-none-any.whl", hash = "sha256:25e861299e5303a0477568557c4045eccc7a34c17fc08e7959558707b9ebe548"}, - {file = "nbclient-0.8.0.tar.gz", hash = "sha256:f9b179cd4b2d7bca965f900a2ebf0db4a12ebff2f36a711cb66861e4ae158e55"}, + {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, + {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, ] [package.dependencies] @@ -2122,14 +2119,14 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.7.3" +version = "7.13.0" description = "Converting Jupyter Notebooks" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.7.3-py3-none-any.whl", hash = "sha256:3022adadff3f86578a47fab7c2228bb3ca9c56a24345642a22f917f6168b48fc"}, - {file = "nbconvert-7.7.3.tar.gz", hash = "sha256:4a5996bf5f3cd16aa0431897ba1aa4c64842c2079f434b3dc6b8c4b252ef3355"}, + {file = "nbconvert-7.13.0-py3-none-any.whl", hash = 
"sha256:22521cfcc10ba5755e44acb6a70d2bd8a891ce7aed6746481e10cd548b169e19"}, + {file = "nbconvert-7.13.0.tar.gz", hash = "sha256:c6f61c86fca5b28bd17f4f9a308248e59fa2b54919e1589f6cc3575c5dfec2bd"}, ] [package.dependencies] @@ -2156,7 +2153,7 @@ docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sp qtpdf = ["nbconvert[qtpng]"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pytest", "pytest-dependency"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest"] webpdf = ["playwright"] [[package]] @@ -2183,30 +2180,29 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.7" +version = "1.5.8" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, - {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] [[package]] name = "notebook" -version = "7.0.1" +version = "7.0.6" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.1-py3-none-any.whl", hash = "sha256:35327476042140e8739ff8fcfecdc915658ae72b4db72d6e3b537badcdbf9e35"}, - {file = "notebook-7.0.1.tar.gz", hash = "sha256:2e16ad4e63ea89f7efbe212ee7c1693fcfa5ab55ffef75047530f74af4bd926c"}, + {file = "notebook-7.0.6-py3-none-any.whl", hash = 
"sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, + {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, ] [package.dependencies] -importlib-resources = {version = ">=5.0", markers = "python_version < \"3.9\""} jupyter-server = ">=2.4.0,<3" jupyterlab = ">=4.0.2,<5" jupyterlab-server = ">=2.22.1,<3" @@ -2216,7 +2212,7 @@ tornado = ">=6.2.0" [package.extras] dev = ["hatch", "pre-commit"] docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] [[package]] name = "notebook-shim" @@ -2238,26 +2234,26 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "overrides" -version = "7.3.1" +version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." 
category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"}, - {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"}, + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, ] [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -2302,26 +2298,26 @@ files = [ [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "pexpect" -version = "4.8.0" +version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." category = "dev" optional = false python-versions = "*" files = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, ] [package.dependencies] @@ -2353,14 +2349,14 @@ files = [ [[package]] name = "platformdirs" -version = "3.10.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -2369,14 +2365,14 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -2404,14 +2400,14 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "prometheus-client" -version = "0.17.1" +version = "0.19.0" description = "Python client for the Prometheus monitoring system." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, + {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, + {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, ] [package.extras] @@ -2419,14 +2415,14 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.39" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, - {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -2434,26 +2430,28 @@ wcwidth = "*" [[package]] name = "psutil" -version = "5.9.5" +version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = 
"psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, + {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, + {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, + {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, + {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, + {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, + {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, + {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, + {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, + {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = 
"sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, + {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, + {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, + {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, + {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, ] [package.extras] @@ -2461,25 +2459,25 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg2" -version = "2.9.6" +version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "psycopg2-2.9.6-cp310-cp310-win32.whl", hash = "sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1"}, - {file = "psycopg2-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494"}, - {file = "psycopg2-2.9.6-cp311-cp311-win32.whl", hash = "sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad"}, - {file = "psycopg2-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:11aca705ec888e4f4cea97289a0bf0f22a067a32614f6ef64fcf7b8bfbc53744"}, - {file = "psycopg2-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:36c941a767341d11549c0fbdbb2bf5be2eda4caf87f65dfcd7d146828bd27f39"}, - {file = "psycopg2-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:869776630c04f335d4124f120b7fb377fe44b0a7645ab3c34b4ba42516951889"}, - {file = "psycopg2-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:a8ad4a47f42aa6aec8d061fdae21eaed8d864d4bb0f0cade5ad32ca16fcd6258"}, 
- {file = "psycopg2-2.9.6-cp38-cp38-win32.whl", hash = "sha256:2362ee4d07ac85ff0ad93e22c693d0f37ff63e28f0615a16b6635a645f4b9214"}, - {file = "psycopg2-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:d24ead3716a7d093b90b27b3d73459fe8cd90fd7065cf43b3c40966221d8c394"}, - {file = "psycopg2-2.9.6-cp39-cp39-win32.whl", hash = "sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f"}, - {file = "psycopg2-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5"}, - {file = "psycopg2-2.9.6.tar.gz", hash = "sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011"}, + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + 
{file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, ] [[package]] @@ -2511,14 +2509,14 @@ tests = ["pytest"] [[package]] name = "pycodestyle" -version = "2.11.0" +version = "2.11.1" description = "Python style guide checker" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, - {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] @@ -2588,18 +2586,19 @@ files = [ [[package]] name = "pygments" -version = "2.15.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" @@ -2621,18 +2620,18 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "2.17.5" +version = "2.17.7" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "pylint-2.17.5-py3-none-any.whl", hash = "sha256:73995fb8216d3bed149c8d51bba25b2c52a8251a2c8ac846ec668ce38fab5413"}, - {file = "pylint-2.17.5.tar.gz", hash = "sha256:f7b601cbc06fef7e62a754e2b41294c2aa31f1cb659624b9a85bcba29eaf8252"}, + {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, + {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, ] [package.dependencies] -astroid = ">=2.15.6,<=2.17.0-dev0" +astroid = ">=2.15.8,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -2651,30 +2650,33 @@ testutils = ["gitpython (>3)"] [[package]] name = "pymdown-extensions" -version = "10.1" +version = "10.4" description = "Extension pack for Python Markdown." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.1-py3-none-any.whl", hash = "sha256:ef25dbbae530e8f67575d222b75ff0649b1e841e22c2ae9a20bad9472c2207dc"}, - {file = "pymdown_extensions-10.1.tar.gz", hash = "sha256:508009b211373058debb8247e168de4cbcb91b1bff7b5e961b2c3e864e00b195"}, + {file = "pymdown_extensions-10.4-py3-none-any.whl", hash = "sha256:cfc28d6a09d19448bcbf8eee3ce098c7d17ff99f7bd3069db4819af181212037"}, + {file = "pymdown_extensions-10.4.tar.gz", hash = "sha256:bc46f11749ecd4d6b71cf62396104b4a200bad3498cb0f5dad1b8502fe461a35"}, ] [package.dependencies] markdown = ">=3.2" pyyaml = "*" +[package.extras] +extra = ["pygments (>=2.12)"] + [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.3" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -2767,7 +2769,7 @@ six = ">=1.5" name = "python-dotenv" version = "1.0.0" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2792,14 +2794,14 @@ files = [ [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = 
"sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -2828,17 +2830,18 @@ files = [ [[package]] name = "pywinpty" -version = "2.0.11" +version = "2.0.12" description = "Pseudo terminal support for Windows from Python." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pywinpty-2.0.11-cp310-none-win_amd64.whl", hash = "sha256:452f10ac9ff8ab9151aa8cea9e491a9612a12250b1899278c6a56bc184afb47f"}, - {file = "pywinpty-2.0.11-cp311-none-win_amd64.whl", hash = "sha256:6701867d42aec1239bc0fedf49a336570eb60eb886e81763db77ea2b6c533cc3"}, - {file = "pywinpty-2.0.11-cp38-none-win_amd64.whl", hash = "sha256:0ffd287751ad871141dc9724de70ea21f7fc2ff1af50861e0d232cf70739d8c4"}, - {file = "pywinpty-2.0.11-cp39-none-win_amd64.whl", hash = "sha256:e4e7f023c28ca7aa8e1313e53ba80a4d10171fe27857b7e02f99882dfe3e8638"}, - {file = "pywinpty-2.0.11.tar.gz", hash = "sha256:e244cffe29a894876e2cd251306efd0d8d64abd5ada0a46150a4a71c0b9ad5c5"}, + {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, + {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, + {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, + {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, + {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = 
"sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, + {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, ] [[package]] @@ -2854,7 +2857,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2862,15 +2864,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = 
"PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2887,7 +2882,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2895,7 +2889,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2918,89 +2911,105 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "25.1.0" +version = "25.1.2" description = "Python bindings for 0MQ" category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = 
"pyzmq-25.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1a6169e69034eaa06823da6a93a7739ff38716142b3596c180363dee729d713d"}, - {file = "pyzmq-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:19d0383b1f18411d137d891cab567de9afa609b214de68b86e20173dc624c101"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1e931d9a92f628858a50f5bdffdfcf839aebe388b82f9d2ccd5d22a38a789dc"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97d984b1b2f574bc1bb58296d3c0b64b10e95e7026f8716ed6c0b86d4679843f"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:154bddda2a351161474b36dba03bf1463377ec226a13458725183e508840df89"}, - {file = "pyzmq-25.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cb6d161ae94fb35bb518b74bb06b7293299c15ba3bc099dccd6a5b7ae589aee3"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:90146ab578931e0e2826ee39d0c948d0ea72734378f1898939d18bc9c823fcf9"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:831ba20b660b39e39e5ac8603e8193f8fce1ee03a42c84ade89c36a251449d80"}, - {file = "pyzmq-25.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a522510e3434e12aff80187144c6df556bb06fe6b9d01b2ecfbd2b5bfa5c60c"}, - {file = "pyzmq-25.1.0-cp310-cp310-win32.whl", hash = "sha256:be24a5867b8e3b9dd5c241de359a9a5217698ff616ac2daa47713ba2ebe30ad1"}, - {file = "pyzmq-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:5693dcc4f163481cf79e98cf2d7995c60e43809e325b77a7748d8024b1b7bcba"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:13bbe36da3f8aaf2b7ec12696253c0bf6ffe05f4507985a8844a1081db6ec22d"}, - {file = "pyzmq-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:69511d604368f3dc58d4be1b0bad99b61ee92b44afe1cd9b7bd8c5e34ea8248a"}, - {file = 
"pyzmq-25.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a983c8694667fd76d793ada77fd36c8317e76aa66eec75be2653cef2ea72883"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:332616f95eb400492103ab9d542b69d5f0ff628b23129a4bc0a2fd48da6e4e0b"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58416db767787aedbfd57116714aad6c9ce57215ffa1c3758a52403f7c68cff5"}, - {file = "pyzmq-25.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cad9545f5801a125f162d09ec9b724b7ad9b6440151b89645241d0120e119dcc"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d6128d431b8dfa888bf51c22a04d48bcb3d64431caf02b3cb943269f17fd2994"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b15247c49d8cbea695b321ae5478d47cffd496a2ec5ef47131a9e79ddd7e46c"}, - {file = "pyzmq-25.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:442d3efc77ca4d35bee3547a8e08e8d4bb88dadb54a8377014938ba98d2e074a"}, - {file = "pyzmq-25.1.0-cp311-cp311-win32.whl", hash = "sha256:65346f507a815a731092421d0d7d60ed551a80d9b75e8b684307d435a5597425"}, - {file = "pyzmq-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b45d722046fea5a5694cba5d86f21f78f0052b40a4bbbbf60128ac55bfcc7b6"}, - {file = "pyzmq-25.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f45808eda8b1d71308c5416ef3abe958f033fdbb356984fabbfc7887bed76b3f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b697774ea8273e3c0460cf0bba16cd85ca6c46dfe8b303211816d68c492e132"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b324fa769577fc2c8f5efcd429cef5acbc17d63fe15ed16d6dcbac2c5eb00849"}, - {file = "pyzmq-25.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:5873d6a60b778848ce23b6c0ac26c39e48969823882f607516b91fb323ce80e5"}, - {file = 
"pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0d9e7ba6a815a12c8575ba7887da4b72483e4cfc57179af10c9b937f3f9308f"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:414b8beec76521358b49170db7b9967d6974bdfc3297f47f7d23edec37329b00"}, - {file = "pyzmq-25.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:01f06f33e12497dca86353c354461f75275a5ad9eaea181ac0dc1662da8074fa"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win32.whl", hash = "sha256:b5a07c4f29bf7cb0164664ef87e4aa25435dcc1f818d29842118b0ac1eb8e2b5"}, - {file = "pyzmq-25.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:968b0c737797c1809ec602e082cb63e9824ff2329275336bb88bd71591e94a90"}, - {file = "pyzmq-25.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47b915ba666c51391836d7ed9a745926b22c434efa76c119f77bcffa64d2c50c"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5af31493663cf76dd36b00dafbc839e83bbca8a0662931e11816d75f36155897"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5489738a692bc7ee9a0a7765979c8a572520d616d12d949eaffc6e061b82b4d1"}, - {file = "pyzmq-25.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1fc56a0221bdf67cfa94ef2d6ce5513a3d209c3dfd21fed4d4e87eca1822e3a3"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:75217e83faea9edbc29516fc90c817bc40c6b21a5771ecb53e868e45594826b0"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3830be8826639d801de9053cf86350ed6742c4321ba4236e4b5568528d7bfed7"}, - {file = "pyzmq-25.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3575699d7fd7c9b2108bc1c6128641a9a825a58577775ada26c02eb29e09c517"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win32.whl", hash = "sha256:95bd3a998d8c68b76679f6b18f520904af5204f089beebb7b0301d97704634dd"}, - {file = "pyzmq-25.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:dbc466744a2db4b7ca05589f21ae1a35066afada2f803f92369f5877c100ef62"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:3bed53f7218490c68f0e82a29c92335daa9606216e51c64f37b48eb78f1281f4"}, - {file = "pyzmq-25.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb52e826d16c09ef87132c6e360e1879c984f19a4f62d8a935345deac43f3c12"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ddbef8b53cd16467fdbfa92a712eae46dd066aa19780681a2ce266e88fbc7165"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9301cf1d7fc1ddf668d0abbe3e227fc9ab15bc036a31c247276012abb921b5ff"}, - {file = "pyzmq-25.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e23a8c3b6c06de40bdb9e06288180d630b562db8ac199e8cc535af81f90e64b"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4a82faae00d1eed4809c2f18b37f15ce39a10a1c58fe48b60ad02875d6e13d80"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8398a1b1951aaa330269c35335ae69744be166e67e0ebd9869bdc09426f3871"}, - {file = "pyzmq-25.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d40682ac60b2a613d36d8d3a0cd14fbdf8e7e0618fbb40aa9fa7b796c9081584"}, - {file = "pyzmq-25.1.0-cp38-cp38-win32.whl", hash = "sha256:33d5c8391a34d56224bccf74f458d82fc6e24b3213fc68165c98b708c7a69325"}, - {file = "pyzmq-25.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:c66b7ff2527e18554030319b1376d81560ca0742c6e0b17ff1ee96624a5f1afd"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:af56229ea6527a849ac9fb154a059d7e32e77a8cba27e3e62a1e38d8808cb1a5"}, - {file = "pyzmq-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bdca18b94c404af6ae5533cd1bc310c4931f7ac97c148bbfd2cd4bdd62b96253"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:0b6b42f7055bbc562f63f3df3b63e3dd1ebe9727ff0f124c3aa7bcea7b3a00f9"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c2fc7aad520a97d64ffc98190fce6b64152bde57a10c704b337082679e74f67"}, - {file = "pyzmq-25.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be86a26415a8b6af02cd8d782e3a9ae3872140a057f1cadf0133de685185c02b"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851fb2fe14036cfc1960d806628b80276af5424db09fe5c91c726890c8e6d943"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a21fec5c3cea45421a19ccbe6250c82f97af4175bc09de4d6dd78fb0cb4c200"}, - {file = "pyzmq-25.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bad172aba822444b32eae54c2d5ab18cd7dee9814fd5c7ed026603b8cae2d05f"}, - {file = "pyzmq-25.1.0-cp39-cp39-win32.whl", hash = "sha256:4d67609b37204acad3d566bb7391e0ecc25ef8bae22ff72ebe2ad7ffb7847158"}, - {file = "pyzmq-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:71c7b5896e40720d30cd77a81e62b433b981005bbff0cb2f739e0f8d059b5d99"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cb27ef9d3bdc0c195b2dc54fcb8720e18b741624686a81942e14c8b67cc61a6"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0c4fc2741e0513b5d5a12fe200d6785bbcc621f6f2278893a9ca7bed7f2efb7d"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fc34fdd458ff77a2a00e3c86f899911f6f269d393ca5675842a6e92eea565bae"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8751f9c1442624da391bbd92bd4b072def6d7702a9390e4479f45c182392ff78"}, - {file = "pyzmq-25.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6581e886aec3135964a302a0f5eb68f964869b9efd1dbafdebceaaf2934f8a68"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:5482f08d2c3c42b920e8771ae8932fbaa0a67dff925fc476996ddd8155a170f3"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7fbcafa3ea16d1de1f213c226005fea21ee16ed56134b75b2dede5a2129e62"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:adecf6d02b1beab8d7c04bc36f22bb0e4c65a35eb0b4750b91693631d4081c70"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6d39e42a0aa888122d1beb8ec0d4ddfb6c6b45aecb5ba4013c27e2f28657765"}, - {file = "pyzmq-25.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7018289b402ebf2b2c06992813523de61d4ce17bd514c4339d8f27a6f6809492"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9e68ae9864d260b18f311b68d29134d8776d82e7f5d75ce898b40a88df9db30f"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e21cc00e4debe8f54c3ed7b9fcca540f46eee12762a9fa56feb8512fd9057161"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f666ae327a6899ff560d741681fdcdf4506f990595201ed39b44278c471ad98"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5efcc29056dfe95e9c9db0dfbb12b62db9c4ad302f812931b6d21dd04a9119"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:48e5e59e77c1a83162ab3c163fc01cd2eebc5b34560341a67421b09be0891287"}, - {file = "pyzmq-25.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:108c96ebbd573d929740d66e4c3d1bdf31d5cde003b8dc7811a3c8c5b0fc173b"}, - {file = "pyzmq-25.1.0.tar.gz", hash = "sha256:80c41023465d36280e801564a69cbfce8ae85ff79b080e1913f6e90481fb8957"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = 
"pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = 
"pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = 
"pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = 
"sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + 
{file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = 
"pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = 
"pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, ] [package.dependencies] @@ -3008,25 +3017,24 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qtconsole" -version = "5.4.3" +version = "5.5.1" description = "Jupyter Qt console" category = "dev" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "qtconsole-5.4.3-py3-none-any.whl", hash = "sha256:35fd6e87b1f6d1fd41801b07e69339f8982e76afd4fa8ef35595bc6036717189"}, - {file = "qtconsole-5.4.3.tar.gz", hash = "sha256:5e4082a86a201796b2a5cfd4298352d22b158b51b57736531824715fc2a979dd"}, + {file = "qtconsole-5.5.1-py3-none-any.whl", hash = "sha256:8c75fa3e9b4ed884880ff7cea90a1b67451219279ec33deaee1d59e3df1a5d2b"}, + {file = "qtconsole-5.5.1.tar.gz", hash = "sha256:a0e806c6951db9490628e4df80caec9669b65149c7ba40f9bf033c025a5b56bc"}, ] [package.dependencies] ipykernel = ">=4.1" -ipython-genutils = "*" jupyter-client = ">=4.1" jupyter-core = "*" packaging = "*" pygments = "*" pyzmq = ">=17.1" -qtpy = ">=2.0.1" +qtpy = ">=2.4.0" traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" [package.extras] @@ -3035,14 +3043,14 @@ test = ["flaky", "pytest", "pytest-qt"] [[package]] name = "qtpy" -version = "2.3.1" +version = "2.4.1" description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "QtPy-2.3.1-py3-none-any.whl", hash = "sha256:5193d20e0b16e4d9d3bc2c642d04d9f4e2c892590bd1b9c92bfe38a95d5a2e12"}, - {file = "QtPy-2.3.1.tar.gz", hash = "sha256:a8c74982d6d172ce124d80cafd39653df78989683f760f2281ba91a6e7b9de8b"}, + {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, + {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, ] [package.dependencies] @@ -3053,14 +3061,14 @@ test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] [[package]] name = "referencing" -version = "0.30.0" +version = "0.32.0" description = "JSON Referencing + Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.30.0-py3-none-any.whl", hash = "sha256:c257b08a399b6c2f5a3510a50d28ab5dbc7bbde049bcaf954d43c446f83ab548"}, - {file = "referencing-0.30.0.tar.gz", hash = "sha256:47237742e990457f7512c7d27486394a9aadaf876cbfaa4be65b27b4f4d47c6b"}, + {file = "referencing-0.32.0-py3-none-any.whl", hash = "sha256:bdcd3efb936f82ff86f993093f6da7435c7de69a3b3a5a06678a6050184bee99"}, + {file = "referencing-0.32.0.tar.gz", hash = "sha256:689e64fe121843dcfd57b71933318ef1f91188ffb45367332700a86ac8fd6161"}, ] [package.dependencies] @@ -3118,109 +3126,111 @@ files = [ [[package]] name = "rpds-py" -version = "0.9.2" +version = "0.15.2" description = "Python bindings to Rust's persistent data structures (rpds)" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.9.2-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:ab6919a09c055c9b092798ce18c6c4adf49d24d4d9e43a92b257e3f2548231e7"}, - {file = "rpds_py-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d55777a80f78dd09410bd84ff8c95ee05519f41113b2df90a69622f5540c4f8b"}, - {file = 
"rpds_py-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a216b26e5af0a8e265d4efd65d3bcec5fba6b26909014effe20cd302fd1138fa"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29cd8bfb2d716366a035913ced99188a79b623a3512292963d84d3e06e63b496"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44659b1f326214950a8204a248ca6199535e73a694be8d3e0e869f820767f12f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:745f5a43fdd7d6d25a53ab1a99979e7f8ea419dfefebcab0a5a1e9095490ee5e"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a987578ac5214f18b99d1f2a3851cba5b09f4a689818a106c23dbad0dfeb760f"}, - {file = "rpds_py-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf4151acb541b6e895354f6ff9ac06995ad9e4175cbc6d30aaed08856558201f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:03421628f0dc10a4119d714a17f646e2837126a25ac7a256bdf7c3943400f67f"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13b602dc3e8dff3063734f02dcf05111e887f301fdda74151a93dbbc249930fe"}, - {file = "rpds_py-0.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fae5cb554b604b3f9e2c608241b5d8d303e410d7dfb6d397c335f983495ce7f6"}, - {file = "rpds_py-0.9.2-cp310-none-win32.whl", hash = "sha256:47c5f58a8e0c2c920cc7783113df2fc4ff12bf3a411d985012f145e9242a2764"}, - {file = "rpds_py-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:4ea6b73c22d8182dff91155af018b11aac9ff7eca085750455c5990cb1cfae6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e564d2238512c5ef5e9d79338ab77f1cbbda6c2d541ad41b2af445fb200385e3"}, - {file = "rpds_py-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f411330a6376fb50e5b7a3e66894e4a39e60ca2e17dce258d53768fea06a37bd"}, - 
{file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e7521f5af0233e89939ad626b15278c71b69dc1dfccaa7b97bd4cdf96536bb7"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8d3335c03100a073883857e91db9f2e0ef8a1cf42dc0369cbb9151c149dbbc1b"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d25b1c1096ef0447355f7293fbe9ad740f7c47ae032c2884113f8e87660d8f6e"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a5d3fbd02efd9cf6a8ffc2f17b53a33542f6b154e88dd7b42ef4a4c0700fdad"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5934e2833afeaf36bd1eadb57256239785f5af0220ed8d21c2896ec4d3a765f"}, - {file = "rpds_py-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:095b460e117685867d45548fbd8598a8d9999227e9061ee7f012d9d264e6048d"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91378d9f4151adc223d584489591dbb79f78814c0734a7c3bfa9c9e09978121c"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:24a81c177379300220e907e9b864107614b144f6c2a15ed5c3450e19cf536fae"}, - {file = "rpds_py-0.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:de0b6eceb46141984671802d412568d22c6bacc9b230174f9e55fc72ef4f57de"}, - {file = "rpds_py-0.9.2-cp311-none-win32.whl", hash = "sha256:700375326ed641f3d9d32060a91513ad668bcb7e2cffb18415c399acb25de2ab"}, - {file = "rpds_py-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:0766babfcf941db8607bdaf82569ec38107dbb03c7f0b72604a0b346b6eb3298"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:b1440c291db3f98a914e1afd9d6541e8fc60b4c3aab1a9008d03da4651e67386"}, - {file = "rpds_py-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:0f2996fbac8e0b77fd67102becb9229986396e051f33dbceada3debaacc7033f"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f30d205755566a25f2ae0382944fcae2f350500ae4df4e795efa9e850821d82"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:159fba751a1e6b1c69244e23ba6c28f879a8758a3e992ed056d86d74a194a0f3"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1f044792e1adcea82468a72310c66a7f08728d72a244730d14880cd1dabe36b"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9251eb8aa82e6cf88510530b29eef4fac825a2b709baf5b94a6094894f252387"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01899794b654e616c8625b194ddd1e5b51ef5b60ed61baa7a2d9c2ad7b2a4238"}, - {file = "rpds_py-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0c43f8ae8f6be1d605b0465671124aa8d6a0e40f1fb81dcea28b7e3d87ca1e1"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207f57c402d1f8712618f737356e4b6f35253b6d20a324d9a47cb9f38ee43a6b"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b52e7c5ae35b00566d244ffefba0f46bb6bec749a50412acf42b1c3f402e2c90"}, - {file = "rpds_py-0.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:978fa96dbb005d599ec4fd9ed301b1cc45f1a8f7982d4793faf20b404b56677d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6aa8326a4a608e1c28da191edd7c924dff445251b94653988efb059b16577a4d"}, - {file = "rpds_py-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aad51239bee6bff6823bbbdc8ad85136c6125542bbc609e035ab98ca1e32a192"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd4dc3602370679c2dfb818d9c97b1137d4dd412230cfecd3c66a1bf388a196"}, - {file = 
"rpds_py-0.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd9da77c6ec1f258387957b754f0df60766ac23ed698b61941ba9acccd3284d1"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:190ca6f55042ea4649ed19c9093a9be9d63cd8a97880106747d7147f88a49d18"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:876bf9ed62323bc7dcfc261dbc5572c996ef26fe6406b0ff985cbcf460fc8a4c"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa2818759aba55df50592ecbc95ebcdc99917fa7b55cc6796235b04193eb3c55"}, - {file = "rpds_py-0.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9ea4d00850ef1e917815e59b078ecb338f6a8efda23369677c54a5825dbebb55"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5855c85eb8b8a968a74dc7fb014c9166a05e7e7a8377fb91d78512900aadd13d"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:14c408e9d1a80dcb45c05a5149e5961aadb912fff42ca1dd9b68c0044904eb32"}, - {file = "rpds_py-0.9.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:65a0583c43d9f22cb2130c7b110e695fff834fd5e832a776a107197e59a1898e"}, - {file = "rpds_py-0.9.2-cp38-none-win32.whl", hash = "sha256:71f2f7715935a61fa3e4ae91d91b67e571aeb5cb5d10331ab681256bda2ad920"}, - {file = "rpds_py-0.9.2-cp38-none-win_amd64.whl", hash = "sha256:674c704605092e3ebbbd13687b09c9f78c362a4bc710343efe37a91457123044"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:07e2c54bef6838fa44c48dfbc8234e8e2466d851124b551fc4e07a1cfeb37260"}, - {file = "rpds_py-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7fdf55283ad38c33e35e2855565361f4bf0abd02470b8ab28d499c663bc5d7c"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:890ba852c16ace6ed9f90e8670f2c1c178d96510a21b06d2fa12d8783a905193"}, - {file = 
"rpds_py-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:50025635ba8b629a86d9d5474e650da304cb46bbb4d18690532dd79341467846"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:517cbf6e67ae3623c5127206489d69eb2bdb27239a3c3cc559350ef52a3bbf0b"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0836d71ca19071090d524739420a61580f3f894618d10b666cf3d9a1688355b1"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c439fd54b2b9053717cca3de9583be6584b384d88d045f97d409f0ca867d80f"}, - {file = "rpds_py-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f68996a3b3dc9335037f82754f9cdbe3a95db42bde571d8c3be26cc6245f2324"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7d68dc8acded354c972116f59b5eb2e5864432948e098c19fe6994926d8e15c3"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f963c6b1218b96db85fc37a9f0851eaf8b9040aa46dec112611697a7023da535"}, - {file = "rpds_py-0.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5a46859d7f947061b4010e554ccd1791467d1b1759f2dc2ec9055fa239f1bc26"}, - {file = "rpds_py-0.9.2-cp39-none-win32.whl", hash = "sha256:e07e5dbf8a83c66783a9fe2d4566968ea8c161199680e8ad38d53e075df5f0d0"}, - {file = "rpds_py-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:682726178138ea45a0766907957b60f3a1bf3acdf212436be9733f28b6c5af3c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:196cb208825a8b9c8fc360dc0f87993b8b260038615230242bf18ec84447c08d"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c7671d45530fcb6d5e22fd40c97e1e1e01965fc298cbda523bb640f3d923b387"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83b32f0940adec65099f3b1c215ef7f1d025d13ff947975a055989cb7fd019a4"}, - 
{file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f67da97f5b9eac838b6980fc6da268622e91f8960e083a34533ca710bec8611"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03975db5f103997904c37e804e5f340c8fdabbb5883f26ee50a255d664eed58c"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:987b06d1cdb28f88a42e4fb8a87f094e43f3c435ed8e486533aea0bf2e53d931"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c861a7e4aef15ff91233751619ce3a3d2b9e5877e0fcd76f9ea4f6847183aa16"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02938432352359805b6da099c9c95c8a0547fe4b274ce8f1a91677401bb9a45f"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ef1f08f2a924837e112cba2953e15aacfccbbfcd773b4b9b4723f8f2ddded08e"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:35da5cc5cb37c04c4ee03128ad59b8c3941a1e5cd398d78c37f716f32a9b7f67"}, - {file = "rpds_py-0.9.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:141acb9d4ccc04e704e5992d35472f78c35af047fa0cfae2923835d153f091be"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79f594919d2c1a0cc17d1988a6adaf9a2f000d2e1048f71f298b056b1018e872"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:a06418fe1155e72e16dddc68bb3780ae44cebb2912fbd8bb6ff9161de56e1798"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b2eb034c94b0b96d5eddb290b7b5198460e2d5d0c421751713953a9c4e47d10"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b08605d248b974eb02f40bdcd1a35d3924c83a2a5e8f5d0fa5af852c4d960af"}, - {file = 
"rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0805911caedfe2736935250be5008b261f10a729a303f676d3d5fea6900c96a"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab2299e3f92aa5417d5e16bb45bb4586171c1327568f638e8453c9f8d9e0f020"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c8d7594e38cf98d8a7df25b440f684b510cf4627fe038c297a87496d10a174f"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b9ec12ad5f0a4625db34db7e0005be2632c1013b253a4a60e8302ad4d462afd"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1fcdee18fea97238ed17ab6478c66b2095e4ae7177e35fb71fbe561a27adf620"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:933a7d5cd4b84f959aedeb84f2030f0a01d63ae6cf256629af3081cf3e3426e8"}, - {file = "rpds_py-0.9.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:686ba516e02db6d6f8c279d1641f7067ebb5dc58b1d0536c4aaebb7bf01cdc5d"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0173c0444bec0a3d7d848eaeca2d8bd32a1b43f3d3fde6617aac3731fa4be05f"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d576c3ef8c7b2d560e301eb33891d1944d965a4d7a2eacb6332eee8a71827db6"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed89861ee8c8c47d6beb742a602f912b1bb64f598b1e2f3d758948721d44d468"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1054a08e818f8e18910f1bee731583fe8f899b0a0a5044c6e680ceea34f93876"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99e7c4bb27ff1aab90dcc3e9d37ee5af0231ed98d99cb6f5250de28889a3d502"}, - {file = 
"rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c545d9d14d47be716495076b659db179206e3fd997769bc01e2d550eeb685596"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9039a11bca3c41be5a58282ed81ae422fa680409022b996032a43badef2a3752"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb39aca7a64ad0c9490adfa719dbeeb87d13be137ca189d2564e596f8ba32c07"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2d8b3b3a2ce0eaa00c5bbbb60b6713e94e7e0becab7b3db6c5c77f979e8ed1f1"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:99b1c16f732b3a9971406fbfe18468592c5a3529585a45a35adbc1389a529a03"}, - {file = "rpds_py-0.9.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c27ee01a6c3223025f4badd533bea5e87c988cb0ba2811b690395dfe16088cfe"}, - {file = "rpds_py-0.9.2.tar.gz", hash = "sha256:8d70e8f14900f2657c249ea4def963bed86a29b81f81f5b76b5a9215680de945"}, + {file = "rpds_py-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:337a8653fb11d2fbe7157c961cc78cb3c161d98cf44410ace9a3dc2db4fad882"}, + {file = "rpds_py-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:813a65f95bfcb7c8f2a70dd6add9b51e9accc3bdb3e03d0ff7a9e6a2d3e174bf"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:082e0e55d73690ffb4da4352d1b5bbe1b5c6034eb9dc8c91aa2a3ee15f70d3e2"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5595c80dd03d7e6c6afb73f3594bf3379a7d79fa57164b591d012d4b71d6ac4c"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb10bb720348fe1647a94eb605accb9ef6a9b1875d8845f9e763d9d71a706387"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:53304cc14b1d94487d70086e1cb0cb4c29ec6da994d58ae84a4d7e78c6a6d04d"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d64a657de7aae8db2da60dc0c9e4638a0c3893b4d60101fd564a3362b2bfeb34"}, + {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ee40206d1d6e95eaa2b7b919195e3689a5cf6ded730632de7f187f35a1b6052c"}, + {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1607cda6129f815493a3c184492acb5ae4aa6ed61d3a1b3663aa9824ed26f7ac"}, + {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3e6e2e502c4043c52a99316d89dc49f416acda5b0c6886e0dd8ea7bb35859e8"}, + {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:044f6f46d62444800402851afa3c3ae50141f12013060c1a3a0677e013310d6d"}, + {file = "rpds_py-0.15.2-cp310-none-win32.whl", hash = "sha256:c827a931c6b57f50f1bb5de400dcfb00bad8117e3753e80b96adb72d9d811514"}, + {file = "rpds_py-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3bbc89ce2a219662ea142f0abcf8d43f04a41d5b1880be17a794c39f0d609cb0"}, + {file = "rpds_py-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:1fd0f0b1ccd7d537b858a56355a250108df692102e08aa2036e1a094fd78b2dc"}, + {file = "rpds_py-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b414ef79f1f06fb90b5165db8aef77512c1a5e3ed1b4807da8476b7e2c853283"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c31272c674f725dfe0f343d73b0abe8c878c646967ec1c6106122faae1efc15b"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6945c2d61c42bb7e818677f43638675b8c1c43e858b67a96df3eb2426a86c9d"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02744236ac1895d7be837878e707a5c35fb8edc5137602f253b63623d7ad5c8c"}, + {file = 
"rpds_py-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2181e86d4e1cdf49a7320cb72a36c45efcb7670d0a88f09fd2d3a7967c0540fd"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a8ff8e809da81363bffca2b965cb6e4bf6056b495fc3f078467d1f8266fe27f"}, + {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97532802f14d383f37d603a56e226909f825a83ff298dc1b6697de00d2243999"}, + {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:13716e53627ad97babf72ac9e01cf9a7d4af2f75dd5ed7b323a7a9520e948282"}, + {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2f1f295a5c28cfa74a7d48c95acc1c8a7acd49d7d9072040d4b694fe11cd7166"}, + {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8ec464f20fe803ae00419bd1610934e3bda963aeba1e6181dfc9033dc7e8940c"}, + {file = "rpds_py-0.15.2-cp311-none-win32.whl", hash = "sha256:b61d5096e75fd71018b25da50b82dd70ec39b5e15bb2134daf7eb7bbbc103644"}, + {file = "rpds_py-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:9d41ebb471a6f064c0d1c873c4f7dded733d16ca5db7d551fb04ff3805d87802"}, + {file = "rpds_py-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:13ff62d3561a23c17341b4afc78e8fcfd799ab67c0b1ca32091d71383a98ba4b"}, + {file = "rpds_py-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b70b45a40ad0798b69748b34d508259ef2bdc84fb2aad4048bc7c9cafb68ddb3"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ecbba7efd82bd2a4bb88aab7f984eb5470991c1347bdd1f35fb34ea28dba6e"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9d38494a8d21c246c535b41ecdb2d562c4b933cf3d68de03e8bc43a0d41be652"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:13152dfe7d7c27c40df8b99ac6aab12b978b546716e99f67e8a67a1d441acbc3"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:164fcee32f15d04d61568c9cb0d919e37ff3195919cd604039ff3053ada0461b"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a5122b17a4faf5d7a6d91fa67b479736c0cacc7afe791ddebb7163a8550b799"}, + {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:46b4f3d47d1033db569173be62365fbf7808c2bd3fb742314d251f130d90d44c"}, + {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c61e42b4ceb9759727045765e87d51c1bb9f89987aca1fcc8a040232138cad1c"}, + {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d2aa3ca9552f83b0b4fa6ca8c6ce08da6580f37e3e0ab7afac73a1cfdc230c0e"}, + {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec19e823b4ccd87bd69e990879acbce9e961fc7aebe150156b8f4418d4b27b7f"}, + {file = "rpds_py-0.15.2-cp312-none-win32.whl", hash = "sha256:afeabb382c1256a7477b739820bce7fe782bb807d82927102cee73e79b41b38b"}, + {file = "rpds_py-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:422b0901878a31ef167435c5ad46560362891816a76cc0d150683f3868a6f0d1"}, + {file = "rpds_py-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:baf744e5f9d5ee6531deea443be78b36ed1cd36c65a0b95ea4e8d69fa0102268"}, + {file = "rpds_py-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e072f5da38d6428ba1fc1115d3cc0dae895df671cb04c70c019985e8c7606be"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f138f550b83554f5b344d6be35d3ed59348510edc3cb96f75309db6e9bfe8210"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2a4cd924d0e2f4b1a68034abe4cadc73d69ad5f4cf02db6481c0d4d749f548f"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:5eb05b654a41e0f81ab27a7c3e88b6590425eb3e934e1d533ecec5dc88a6ffff"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ee066a64f0d2ba45391cac15b3a70dcb549e968a117bd0500634754cfe0e5fc"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51a899792ee2c696072791e56b2020caff58b275abecbc9ae0cb71af0645c95"}, + {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac2ac84a4950d627d84b61f082eba61314373cfab4b3c264b62efab02ababe83"}, + {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:62b292fff4739c6be89e6a0240c02bda5a9066a339d90ab191cf66e9fdbdc193"}, + {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:98ee201a52a7f65608e5494518932e1473fd43535f12cade0a1b4ab32737fe28"}, + {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3d40fb3ca22e3d40f494d577441b263026a3bd8c97ae6ce89b2d3c4b39ac9581"}, + {file = "rpds_py-0.15.2-cp38-none-win32.whl", hash = "sha256:30479a9f1fce47df56b07460b520f49fa2115ec2926d3b1303c85c81f8401ed1"}, + {file = "rpds_py-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:2df3d07a16a3bef0917b28cd564778fbb31f3ffa5b5e33584470e2d1b0f248f0"}, + {file = "rpds_py-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:56b51ba29a18e5f5810224bcf00747ad931c0716e3c09a76b4a1edd3d4aba71f"}, + {file = "rpds_py-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c11bc5814554b018f6c5d6ae0969e43766f81e995000b53a5d8c8057055e886"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2faa97212b0dc465afeedf49045cdd077f97be1188285e646a9f689cb5dfff9e"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:86c01299942b0f4b5b5f28c8701689181ad2eab852e65417172dbdd6c5b3ccc8"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dd7d3608589072f63078b4063a6c536af832e76b0b3885f1bfe9e892abe6c207"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:938518a11780b39998179d07f31a4a468888123f9b00463842cd40f98191f4d3"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dccc623725d0b298f557d869a68496a2fd2a9e9c41107f234fa5f7a37d278ac"}, + {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d46ee458452727a147d7897bb33886981ae1235775e05decae5d5d07f537695a"}, + {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d9d7ebcd11ea76ba0feaae98485cd8e31467c3d7985210fab46983278214736b"}, + {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8a5f574b92b3ee7d254e56d56e37ec0e1416acb1ae357c4956d76a1788dc58fb"}, + {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3db0c998c92b909d7c90b66c965590d4f3cd86157176a6cf14aa1f867b77b889"}, + {file = "rpds_py-0.15.2-cp39-none-win32.whl", hash = "sha256:bbc7421cbd28b4316d1d017db338039a7943f945c6f2bb15e1439b14b5682d28"}, + {file = "rpds_py-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:1c24e30d720c0009b6fb2e1905b025da56103c70a8b31b99138e4ed1c2a6c5b0"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e6fcd0a0f62f2997107f758bb372397b8d5fd5f39cc6dcb86f7cb98a2172d6c"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d800a8e2ac62db1b9ea5d6d1724f1a93c53907ca061de4d05ed94e8dfa79050c"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e09d017e3f4d9bd7d17a30d3f59e4d6d9ba2d2ced280eec2425e84112cf623f"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b88c3ab98556bc351b36d6208a6089de8c8db14a7f6e1f57f82a334bd2c18f0b"}, + {file = 
"rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f333bfe782a2d05a67cfaa0cc9cd68b36b39ee6acfe099f980541ed973a7093"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b629db53fe17e6ce478a969d30bd1d0e8b53238c46e3a9c9db39e8b65a9ef973"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485fbdd23becb822804ed05622907ee5c8e8a5f43f6f43894a45f463b2217045"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:893e38d0f4319dfa70c0f36381a37cc418985c87b11d9784365b1fff4fa6973b"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8ffdeb7dbd0160d4e391e1f857477e4762d00aa2199c294eb95dfb9451aa1d9f"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:fc33267d58dfbb2361baed52668c5d8c15d24bc0372cecbb79fed77339b55e0d"}, + {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2e7e5633577b3bd56bf3af2ef6ae3778bbafb83743989d57f0e7edbf6c0980e4"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8b9650f92251fdef843e74fc252cdfd6e3c700157ad686eeb0c6d7fdb2d11652"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:07a2e1d78d382f7181789713cdf0c16edbad4fe14fe1d115526cb6f0eef0daa3"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f9c5875515820633bd7709a25c3e60c1ea9ad1c5d4030ce8a8c203309c36fd"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:580182fa5b269c2981e9ce9764367cb4edc81982ce289208d4607c203f44ffde"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa1e626c524d2c7972c0f3a8a575d654a3a9c008370dc2a97e46abd0eaa749b9"}, + {file = 
"rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae9d83a81b09ce3a817e2cbb23aabc07f86a3abc664c613cd283ce7a03541e95"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9235be95662559141934fced8197de6fee8c58870f36756b0584424b6d708393"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a72e00826a2b032dda3eb25aa3e3579c6d6773d22d8446089a57a123481cc46c"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ab095edf1d840a6a6a4307e1a5b907a299a94e7b90e75436ee770b8c35d22a25"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b79c63d29101cbaa53a517683557bb550462394fb91044cc5998dd2acff7340"}, + {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:911e600e798374c0d86235e7ef19109cf865d1336942d398ff313375a25a93ba"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3cd61e759c4075510052d1eca5cddbd297fe1164efec14ef1fce3f09b974dfe4"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d2ae79f31da5143e020a8d4fc74e1f0cbcb8011bdf97453c140aa616db51406"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e99d6510c8557510c220b865d966b105464740dcbebf9b79ecd4fbab30a13d9"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c43e1b89099279cc03eb1c725c5de12af6edcd2f78e2f8a022569efa639ada3"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7187bee72384b9cfedf09a29a3b2b6e8815cc64c095cdc8b5e6aec81e9fd5f"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3423007fc0661827e06f8a185a3792c73dda41f30f3421562f210cf0c9e49569"}, + {file = 
"rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2974e6dff38afafd5ccf8f41cb8fc94600b3f4fd9b0a98f6ece6e2219e3158d5"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:93c18a1696a8e0388ed84b024fe1a188a26ba999b61d1d9a371318cb89885a8c"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd0841a586b7105513a7c8c3d5c276f3adc762a072d81ef7fae80632afad1e"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:709dc11af2f74ba89c68b1592368c6edcbccdb0a06ba77eb28c8fe08bb6997da"}, + {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:fc066395e6332da1e7525d605b4c96055669f8336600bef8ac569d5226a7c76f"}, + {file = "rpds_py-0.15.2.tar.gz", hash = "sha256:373b76eeb79e8c14f6d82cb1d4d5293f9e4059baec6c1b16dca7ad13b6131b39"}, ] [[package]] @@ -3278,65 +3288,73 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] name = "sqlalchemy" -version = "2.0.19" +version = "2.0.23" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9deaae357edc2091a9ed5d25e9ee8bba98bcfae454b3911adeaf159c2e9ca9e3"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0bf0fd65b50a330261ec7fe3d091dfc1c577483c96a9fa1e4323e932961aa1b5"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d90ccc15ba1baa345796a8fb1965223ca7ded2d235ccbef80a47b85cea2d71a"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4e688f6784427e5f9479d1a13617f573de8f7d4aa713ba82813bcd16e259d1"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:584f66e5e1979a7a00f4935015840be627e31ca29ad13f49a6e51e97a3fb8cae"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c69ce70047b801d2aba3e5ff3cba32014558966109fecab0c39d16c18510f15"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-win32.whl", hash = "sha256:96f0463573469579d32ad0c91929548d78314ef95c210a8115346271beeeaaa2"}, - {file = "SQLAlchemy-2.0.19-cp310-cp310-win_amd64.whl", hash = 
"sha256:22bafb1da60c24514c141a7ff852b52f9f573fb933b1e6b5263f0daa28ce6db9"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6894708eeb81f6d8193e996257223b6bb4041cb05a17cd5cf373ed836ef87a2"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8f2afd1aafded7362b397581772c670f20ea84d0a780b93a1a1529da7c3d369"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15afbf5aa76f2241184c1d3b61af1a72ba31ce4161013d7cb5c4c2fca04fd6e"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc05b59142445a4efb9c1fd75c334b431d35c304b0e33f4fa0ff1ea4890f92e"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5831138f0cc06b43edf5f99541c64adf0ab0d41f9a4471fd63b54ae18399e4de"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3afa8a21a9046917b3a12ffe016ba7ebe7a55a6fc0c7d950beb303c735c3c3ad"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-win32.whl", hash = "sha256:c896d4e6ab2eba2afa1d56be3d0b936c56d4666e789bfc59d6ae76e9fcf46145"}, - {file = "SQLAlchemy-2.0.19-cp311-cp311-win_amd64.whl", hash = "sha256:024d2f67fb3ec697555e48caeb7147cfe2c08065a4f1a52d93c3d44fc8e6ad1c"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:89bc2b374ebee1a02fd2eae6fd0570b5ad897ee514e0f84c5c137c942772aa0c"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd4d410a76c3762511ae075d50f379ae09551d92525aa5bb307f8343bf7c2c12"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f469f15068cd8351826df4080ffe4cc6377c5bf7d29b5a07b0e717dddb4c7ea2"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cda283700c984e699e8ef0fcc5c61f00c9d14b6f65a4f2767c97242513fcdd84"}, - {file = 
"SQLAlchemy-2.0.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:43699eb3f80920cc39a380c159ae21c8a8924fe071bccb68fc509e099420b148"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-win32.whl", hash = "sha256:61ada5831db36d897e28eb95f0f81814525e0d7927fb51145526c4e63174920b"}, - {file = "SQLAlchemy-2.0.19-cp37-cp37m-win_amd64.whl", hash = "sha256:57d100a421d9ab4874f51285c059003292433c648df6abe6c9c904e5bd5b0828"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:16a310f5bc75a5b2ce7cb656d0e76eb13440b8354f927ff15cbaddd2523ee2d1"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf7b5e3856cbf1876da4e9d9715546fa26b6e0ba1a682d5ed2fc3ca4c7c3ec5b"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e7b69d9ced4b53310a87117824b23c509c6fc1f692aa7272d47561347e133b6"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9eb4575bfa5afc4b066528302bf12083da3175f71b64a43a7c0badda2be365"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6b54d1ad7a162857bb7c8ef689049c7cd9eae2f38864fc096d62ae10bc100c7d"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5d6afc41ca0ecf373366fd8e10aee2797128d3ae45eb8467b19da4899bcd1ee0"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-win32.whl", hash = "sha256:430614f18443b58ceb9dedec323ecddc0abb2b34e79d03503b5a7579cd73a531"}, - {file = "SQLAlchemy-2.0.19-cp38-cp38-win_amd64.whl", hash = "sha256:eb60699de43ba1a1f77363f563bb2c652f7748127ba3a774f7cf2c7804aa0d3d"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a752b7a9aceb0ba173955d4f780c64ee15a1a991f1c52d307d6215c6c73b3a4c"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7351c05db355da112e056a7b731253cbeffab9dfdb3be1e895368513c7d70106"}, - {file = 
"SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa51ce4aea583b0c6b426f4b0563d3535c1c75986c4373a0987d84d22376585b"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae7473a67cd82a41decfea58c0eac581209a0aa30f8bc9190926fbf628bb17f7"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851a37898a8a39783aab603c7348eb5b20d83c76a14766a43f56e6ad422d1ec8"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539010665c90e60c4a1650afe4ab49ca100c74e6aef882466f1de6471d414be7"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-win32.whl", hash = "sha256:f82c310ddf97b04e1392c33cf9a70909e0ae10a7e2ddc1d64495e3abdc5d19fb"}, - {file = "SQLAlchemy-2.0.19-cp39-cp39-win_amd64.whl", hash = "sha256:8e712cfd2e07b801bc6b60fdf64853bc2bd0af33ca8fa46166a23fe11ce0dbb0"}, - {file = "SQLAlchemy-2.0.19-py3-none-any.whl", hash = "sha256:314145c1389b021a9ad5aa3a18bac6f5d939f9087d7fc5443be28cba19d2c972"}, - {file = "SQLAlchemy-2.0.19.tar.gz", hash = "sha256:77a14fa20264af73ddcdb1e2b9c5a829b8cc6b8304d0f093271980e36c200a3f"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, + {file = 
"SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, + {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, + {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, + {file = 
"SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, + {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, + {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, + {file = 
"SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, + {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, + {file = 
"SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, + {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, + {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, + {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, ] [package.dependencies] @@ -3344,7 +3362,8 @@ greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or typing-extensions = ">=4.2.0" [package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] @@ -3355,7 +3374,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)"] +oracle = ["cx-oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -3369,14 +3388,14 @@ sqlcipher = ["sqlcipher3-binary"] [[package]] name = "stack-data" -version = "0.6.2" +version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" category = "dev" optional = false python-versions = "*" files = [ - {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, - {file = "stack_data-0.6.2.tar.gz", hash = 
"sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, ] [package.dependencies] @@ -3389,14 +3408,14 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "terminado" -version = "0.17.1" +version = "0.18.0" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "terminado-0.17.1-py3-none-any.whl", hash = "sha256:8650d44334eba354dd591129ca3124a6ba42c3d5b70df5051b6921d506fdaeae"}, - {file = "terminado-0.17.1.tar.gz", hash = "sha256:6ccbbcd3a4f8a25a5ec04991f39a0b8db52dfcd487ea0e578d977e6752380333"}, + {file = "terminado-0.18.0-py3-none-any.whl", hash = "sha256:87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e"}, + {file = "terminado-0.18.0.tar.gz", hash = "sha256:1ea08a89b835dd1b8c0c900d92848147cef2537243361b2e3f4dc15df9b6fded"}, ] [package.dependencies] @@ -3407,6 +3426,7 @@ tornado = ">=6.1.0" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] [[package]] name = "tinycss2" @@ -3441,52 +3461,52 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.1" +version = "0.12.3" description = "Style preserving TOML library" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, + {file = 
"tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, + {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, ] [[package]] name = "tornado" -version = "6.3.2" +version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." category = "dev" optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c367ab6c0393d71171123ca5515c61ff62fe09024fa6bf299cd1339dc9456829"}, - {file = "tornado-6.3.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b46a6ab20f5c7c1cb949c72c1994a4585d2eaa0be4853f50a03b5031e964fc7c"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2de14066c4a38b4ecbbcd55c5cc4b5340eb04f1c5e81da7451ef555859c833f"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05615096845cf50a895026f749195bf0b10b8909f9be672f50b0fe69cba368e4"}, - {file = "tornado-6.3.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b17b1cf5f8354efa3d37c6e28fdfd9c1c1e5122f2cb56dac121ac61baa47cbe"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:29e71c847a35f6e10ca3b5c2990a52ce38b233019d8e858b755ea6ce4dcdd19d"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:834ae7540ad3a83199a8da8f9f2d383e3c3d5130a328889e4cc991acc81e87a0"}, - {file = "tornado-6.3.2-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6a0848f1aea0d196a7c4f6772197cbe2abc4266f836b0aac76947872cd29b411"}, - {file = "tornado-6.3.2-cp38-abi3-win32.whl", hash = "sha256:7efcbcc30b7c654eb6a8c9c9da787a851c18f8ccd4a5a3a95b05c7accfa068d2"}, - {file = "tornado-6.3.2-cp38-abi3-win_amd64.whl", hash = 
"sha256:0c325e66c8123c606eea33084976c832aa4e766b7dff8aedd7587ea44a604cdf"}, - {file = "tornado-6.3.2.tar.gz", hash = "sha256:4b927c4f19b71e627b13f3db2324e4ae660527143f9e1f2e2fb404f3a187e2ba"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] name = "traitlets" -version = "5.9.0" +version = "5.14.0" description = "Traitlets Python configuration system" category = "dev" 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, + {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, + {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "types-python-dateutil" @@ -3502,14 +3522,14 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.2" +version = "2.31.0.6" description = "Typing stubs for requests" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, - {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, + {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, + {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, ] [package.dependencies] @@ -3541,14 +3561,14 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" category = "main" optional = false 
-python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -3568,18 +3588,18 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "1.26.16" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] @@ -3658,14 +3678,14 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.6" +version = "0.2.12" description = 
"Measures the displayed width of unicode strings in a terminal" category = "dev" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, + {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, + {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, ] [[package]] @@ -3698,31 +3718,31 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.1" +version = "1.7.0" description = "WebSocket client for Python with low level API options" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, ] [package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] name = "werkzeug" -version = "2.3.6" +version = "3.0.1" description = "The comprehensive WSGI web application library." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Werkzeug-2.3.6-py3-none-any.whl", hash = "sha256:935539fa1413afbb9195b24880778422ed620c0fc09670945185cce4d91a8890"}, - {file = "Werkzeug-2.3.6.tar.gz", hash = "sha256:98c774df2f91b05550078891dee5f0eb0cb797a522c757a2452b9cee5b202330"}, + {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, + {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, ] [package.dependencies] @@ -3733,118 +3753,113 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "widgetsnbextension" -version = "4.0.8" +version = "4.0.9" description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.8-py3-none-any.whl", hash = "sha256:2e37f0ce9da11651056280c7efe96f2db052fe8fc269508e3724f5cbd6c93018"}, - {file = "widgetsnbextension-4.0.8.tar.gz", hash = "sha256:9ec291ba87c2dfad42c3d5b6f68713fa18be1acd7476569516b2431682315c17"}, + {file = "widgetsnbextension-4.0.9-py3-none-any.whl", hash = "sha256:91452ca8445beb805792f206e560c1769284267a30ceb1cec9f5bcc887d15175"}, + {file = "widgetsnbextension-4.0.9.tar.gz", hash = "sha256:3c1f5e46dc1166dfd40a42d685e6a51396fd34ff878742a3e47c6f0cc4a2a385"}, ] [[package]] name = "wrapt" -version = "1.15.0" +version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." 
category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" files = [ - {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, - {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, - {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, - {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, - {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, - {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, - {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, - {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, - {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, - {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, - {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, - {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, - {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, - {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, - {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, - {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, - {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, - {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, - {file = 
"wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, - {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, - {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, - {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, - {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, - {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, - {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, - {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, - {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, - {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, - {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, - {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, - {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, - {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, - {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, - {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, - {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, - {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, - {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, - {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, - {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + 
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = 
"wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] [[package]] name = "zipp" -version = "3.16.2" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy 
(>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "efd9b6ce34566eef2dd2f664134d65ebe2487959b5a689aab3a10cdc6d65abc9" +content-hash = "fa8a89016f3b4476727c90453b9f147aac223c5530e4fc2683f2ed17b72f8f27" diff --git a/pyproject.toml b/pyproject.toml index e98e63e3..f47580ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,9 @@ Flask-Cors = "^4.0.0" flask-restx = "^1.1.0" waitress = "^2.1.2" +# Email +flask-mail = "^0.9.1" + # API Client requests = "^2.31.0" From d4645a9d959929d20b389b387fafeb4652d9b727 Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Wed, 20 Dec 2023 12:41:25 -0800 Subject: [PATCH 388/505] =?UTF-8?q?feat:=20=E2=9C=A8=20password=20change?= =?UTF-8?q?=20endpoint=20(#33)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :construction: wip: endpoint to change user password * :sparkles: feat: endpoint to change password * style: 🎨 fix code style issues with Black * :hammer: chore: remove print statements * :hammer: chore: add data type for request.json * :recycle: refactor: password change endpoint modified and moved to authentication * :sparkles: feat: user password change test created for pytest * style: 🎨 fix code style issues with Black * :recycle: refactor: password change test only for one client * :recycle: refactor: password change endpoint modified to POST * :recycle: refactor: pytest password changed test modified to be post * :sparkles: feat: tests for logging in user after password changes --------- Co-authored-by: Lint Action --- apis/authentication.py | 68 +++++++++++++++++++++++++++++++++++ tests/functional/test_user.py | 58 ++++++++++++++++++++++++++++++ 2 files changed, 126 insertions(+) create mode 100644 tests/functional/test_user.py diff --git a/apis/authentication.py b/apis/authentication.py index dace3298..3fd0e44f 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ 
-388,6 +388,74 @@ def post(self): return resp +@api.route("/auth/password/change") +class UserPasswordEndpoint(Resource): + """ + Endpoint for updating user password + """ + + @api.doc(description="Updates User password") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self): + """Updates user password""" + + def validate_current_password(instance): + received_password = instance + + if not g.user.check_password(received_password): + raise ValidationError("Current password is incorrect") + + return True + + def confirm_new_password(instance): + data: Union[Any, dict] = request.json + new_password = data["new_password"] + confirm_password = instance + + if new_password != confirm_password: + raise ValidationError("New password and confirm password do not match") + + return True + + # Schema validation + schema = { + "type": "object", + "required": ["old_password", "new_password", "confirm_password"], + "additionalProperties": False, + "properties": { + "old_password": { + "type": "string", + "minLength": 1, + "format": "current password", + }, + "new_password": {"type": "string", "minLength": 1}, + "confirm_password": { + "type": "string", + "minLength": 1, + "format": "password confirmation", + }, + }, + } + + format_checker = FormatChecker() + format_checker.checks("current password")(validate_current_password) + format_checker.checks("password confirmation")(confirm_new_password) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + return e.message, 400 + + data: Union[Any, dict] = request.json + user = model.User.query.get(g.user.id) + user.set_password(data["new_password"]) + model.db.session.commit() + return "Password updated successfully", 200 + + # @api.route("/auth/current-users") # class CurrentUsers(Resource): # """function is used to see all logged users in diff --git a/tests/functional/test_user.py b/tests/functional/test_user.py new file mode 
100644 index 00000000..d325e713 --- /dev/null +++ b/tests/functional/test_user.py @@ -0,0 +1,58 @@ +"""Tests for user settings""" + + +# ------------------- Password Change ------------------- # +def test_post_password_change(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/change' endpoint is requested (PUT) + THEN check that the response is valid and the password is changed + """ + _logged_in_client = clients[0] + + response = _logged_in_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + + assert response.status_code == 200 + + +def test_post_password_login_invalid_old_password(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/login' endpoint is requested (POST) + THEN check that the response is an error when old password is provided + """ + _logged_in_client = clients[0] + response = _logged_in_client.post( + "/auth/login", + json={ + "email_address": "test@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + + assert response.status_code == 401 + + +def test_post_login_new_password(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/login' endpoint is requested (POST) + THEN check that the response is valid when new password is provided + """ + _logged_in_client = clients[0] + response = _logged_in_client.post( + "/auth/login", + json={ + "email_address": "test@fairhub.io", + "password": "Updatedpassword4testing!", + }, + ) + + assert response.status_code == 200 From 810076f4e6b89bb7c22ed3d9974f2346df2dea80 Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Thu, 21 Dec 2023 13:13:24 -0800 Subject: [PATCH 389/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20update=20dataset?= =?UTF-8?q?=20other=20model=20to=20return=20needed=20data=20(#35)?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * :bug: fix: send id and resource_type back in the dataset/other response PUT/GET * :hammer: chore: remove prints * :recycle: refactor: id removed from response in dataset other --- apis/dataset_metadata/dataset_other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 85593068..6df0f9aa 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -10,13 +10,13 @@ dataset_other = api.model( "DatasetOther", { - "id": fields.String(required=True), "language": fields.String(required=True), "managing_organization_name": fields.String(required=True), "managing_organization_ror_id": fields.String(required=True), "size": fields.List(fields.String, required=True), "standards_followed": fields.String(required=True), "acknowledgement": fields.String(required=True), + "resource_type": fields.String(required=True), }, ) From a9faba00158daf55c2e0eac3e5fb79726a3582da Mon Sep 17 00:00:00 2001 From: Dorian Portillo <50216901+slugb0t@users.noreply.github.com> Date: Thu, 28 Dec 2023 11:47:22 -0800 Subject: [PATCH 390/505] =?UTF-8?q?feat:=20=E2=9C=A8=20utils=20for=20licen?= =?UTF-8?q?se=20text=20and=20support=20to=20store=20license=20(#38)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added testing license_text dataset metadata * style: 🎨 fix code style issues with Black * :wrench: chore: update poetry lock * :sparkles: feat: utils endpoint adding for requesting json * style: 🎨 fix code style issues with Black --------- Co-authored-by: aydawka Co-authored-by: Lint Action --- .../29e42ce4be3f_adding_license_text.py | 25 +++ apis/__init__.py | 3 + apis/authentication.py | 8 +- apis/dataset_metadata/dataset_rights.py | 11 +- apis/utils.py | 21 ++ apis/utils_namespace.py | 4 + core/utils.py | 17 ++ 
model/dataset_metadata/dataset_rights.py | 4 + poetry.lock | 208 ++++++++++-------- .../test_study_dataset_metadata_api.py | 19 ++ tests/functional/test_study_version_api.py | 1 + 11 files changed, 219 insertions(+), 102 deletions(-) create mode 100644 alembic/versions/29e42ce4be3f_adding_license_text.py create mode 100644 apis/utils.py create mode 100644 apis/utils_namespace.py diff --git a/alembic/versions/29e42ce4be3f_adding_license_text.py b/alembic/versions/29e42ce4be3f_adding_license_text.py new file mode 100644 index 00000000..f00ead96 --- /dev/null +++ b/alembic/versions/29e42ce4be3f_adding_license_text.py @@ -0,0 +1,25 @@ +"""adding license text + +Revision ID: 29e42ce4be3f +Revises: 72ac2b020c7c +Create Date: 2023-12-21 13:34:26.478808 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "29e42ce4be3f" +down_revision: Union[str, None] = "72ac2b020c7c" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column("dataset_rights", sa.Column("license_text", sa.String, nullable=True)) + op.execute("UPDATE dataset_rights SET license_text = ''") + with op.batch_alter_table("dataset_rights") as batch_op: + batch_op.alter_column("license_text", nullable=False) diff --git a/apis/__init__.py b/apis/__init__.py index bf2e0873..3c48bc8a 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -41,6 +41,7 @@ from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator from .study_metadata.study_status import api as status from .user import api as user +from .utils import api as utils api = Api( title="FAIRHUB", @@ -88,6 +89,7 @@ "identification", "study_description", "dataset_contributor", + "utils", ] @@ -114,3 +116,4 @@ def get(self): api.add_namespace(participants_api) api.add_namespace(contributors_api) api.add_namespace(user) 
+api.add_namespace(utils) diff --git a/apis/authentication.py b/apis/authentication.py index 3fd0e44f..bdaf33b0 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -286,13 +286,7 @@ def authentication(): def authorization(): """it checks whether url is allowed to be reached to specific routes""" # white listed routes - public_routes = [ - "/auth", - "/docs", - "/echo", - "/swaggerui", - "/swagger.json", - ] + public_routes = ["/auth", "/docs", "/echo", "/swaggerui", "/swagger.json", "/utils"] for route in public_routes: if request.path.startswith(route): diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 64f172bf..86ddd2ac 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -18,6 +18,7 @@ "uri": fields.String(required=True), "identifier": fields.String(required=True), "identifier_scheme": fields.String(required=True), + "license_text": fields.String(required=True), }, ) @@ -35,6 +36,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset rights""" dataset_ = model.Dataset.query.get(dataset_id) dataset_rights_ = dataset_.dataset_rights + print([d.to_dict() for d in dataset_rights_]) return [d.to_dict() for d in dataset_rights_], 200 @api.doc("update rights") @@ -58,8 +60,15 @@ def post(self, study_id: int, dataset_id: int): "identifier_scheme": {"type": "string"}, "rights": {"type": "string", "minLength": 1}, "uri": {"type": "string"}, + "license_text": {"type": "string"}, }, - "required": ["identifier", "identifier_scheme", "rights", "uri"], + "required": [ + "identifier", + "identifier_scheme", + "rights", + "uri", + "license_text", + ], }, "uniqueItems": True, } diff --git a/apis/utils.py b/apis/utils.py new file mode 100644 index 00000000..d797b2b8 --- /dev/null +++ b/apis/utils.py @@ -0,0 +1,21 @@ +"""Utils Endpoints""" +from flask import request +from flask_restx import Resource, Namespace + +from 
core.utils import request_json + +api = Namespace("Utils", description="utils operations", path="/") + + +@api.route("/utils/requestjson") +class RequestJSON(Resource): + """requestJSON Resource""" + + @api.doc("requestjson") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def get(self): + """Get requestjson""" + url = request.args.get("url") + + return request_json(url), 200 diff --git a/apis/utils_namespace.py b/apis/utils_namespace.py new file mode 100644 index 00000000..beb9722c --- /dev/null +++ b/apis/utils_namespace.py @@ -0,0 +1,4 @@ +"""Namespace for utils operations""" +from flask_restx import Namespace + +api = Namespace("Utils", description="utils operations", path="/") diff --git a/core/utils.py b/core/utils.py index e69de29b..cde0b34b 100644 --- a/core/utils.py +++ b/core/utils.py @@ -0,0 +1,17 @@ +"""Utils for core""" +import requests + + +def request_json(url): + """ "Request JSON from URL""" + try: + payload = {} + headers = {} + + response = requests.request( + "GET", url, headers=headers, data=payload, timeout=10 + ) + + return response.json() + except Exception as e: + raise e diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 77103cde..6df32e51 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -18,6 +18,8 @@ def __init__(self, dataset): uri = db.Column(db.String, nullable=False) identifier = db.Column(db.String, nullable=False) identifier_scheme = db.Column(db.String, nullable=False) + license_text = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) @@ -31,6 +33,7 @@ def to_dict(self): "identifier": self.identifier, "identifier_scheme": self.identifier_scheme, "created_at": self.created_at, + "license_text": self.license_text, } def to_dict_metadata(self): @@ -51,4 +54,5 @@ def update(self, 
data: dict): self.uri = data["uri"] self.identifier = data["identifier"] self.identifier_scheme = data["identifier_scheme"] + self.license_text = data["license_text"] self.dataset.touch_dataset() diff --git a/poetry.lock b/poetry.lock index 990d7c8f..f5472891 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1123,74 +1123,74 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "greenlet" -version = "3.0.2" +version = "3.0.3" description = "Lightweight in-process concurrent programming" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9acd8fd67c248b8537953cb3af8787c18a87c33d4dcf6830e410ee1f95a63fd4"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:339c0272a62fac7e602e4e6ec32a64ff9abadc638b72f17f6713556ed011d493"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38878744926cec29b5cc3654ef47f3003f14bfbba7230e3c8492393fe29cc28b"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b3f0497db77cfd034f829678b28267eeeeaf2fc21b3f5041600f7617139e6773"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1a8a08de7f68506a38f9a2ddb26bbd1480689e66d788fcd4b5f77e2d9ecfcc"}, - {file = "greenlet-3.0.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:89a6f6ddcbef4000cda7e205c4c20d319488ff03db961d72d4e73519d2465309"}, - {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c1f647fe5b94b51488b314c82fdda10a8756d650cee8d3cd29f657c6031bdf73"}, - {file = "greenlet-3.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9560c580c896030ff9c311c603aaf2282234643c90d1dec738a1d93e3e53cd51"}, - {file = "greenlet-3.0.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:2e9c5423046eec21f6651268cb674dfba97280701e04ef23d312776377313206"}, - {file = "greenlet-3.0.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1fd25dfc5879a82103b3d9e43fa952e3026c221996ff4d32a9c72052544835d"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfdc950dd25f25d6582952e58521bca749cf3eeb7a9bad69237024308c8196"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edf7a1daba1f7c54326291a8cde58da86ab115b78c91d502be8744f0aa8e3ffa"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4cf532bf3c58a862196b06947b1b5cc55503884f9b63bf18582a75228d9950e"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e79fb5a9fb2d0bd3b6573784f5e5adabc0b0566ad3180a028af99523ce8f6138"}, - {file = "greenlet-3.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:006c1028ac0cfcc4e772980cfe73f5476041c8c91d15d64f52482fc571149d46"}, - {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fefd5eb2c0b1adffdf2802ff7df45bfe65988b15f6b972706a0e55d451bffaea"}, - {file = "greenlet-3.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c0fdb8142742ee68e97c106eb81e7d3e883cc739d9c5f2b28bc38a7bafeb6d1"}, - {file = "greenlet-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:8f8d14a0a4e8c670fbce633d8b9a1ee175673a695475acd838e372966845f764"}, - {file = "greenlet-3.0.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:654b84c9527182036747938b81938f1d03fb8321377510bc1854a9370418ab66"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bc4fde0842ff2b9cf33382ad0b4db91c2582db836793d58d174c569637144"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c27b142a9080bdd5869a2fa7ebf407b3c0b24bd812db925de90e9afe3c417fd6"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0df7eed98ea23b20e9db64d46eb05671ba33147df9405330695bcd81a73bb0c9"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5d60805057d8948065338be6320d35e26b0a72f45db392eb32b70dd6dc9227"}, - {file = "greenlet-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e0e28f5233d64c693382f66d47c362b72089ebf8ac77df7e12ac705c9fa1163d"}, - {file = "greenlet-3.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e4bfa752b3688d74ab1186e2159779ff4867644d2b1ebf16db14281f0445377"}, - {file = "greenlet-3.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c42bb589e6e9f9d8bdd79f02f044dff020d30c1afa6e84c0b56d1ce8a324553c"}, - {file = "greenlet-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:b2cedf279ca38ef3f4ed0d013a6a84a7fc3d9495a716b84a5fc5ff448965f251"}, - {file = "greenlet-3.0.2-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:6d65bec56a7bc352bcf11b275b838df618651109074d455a772d3afe25390b7d"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0acadbc3f72cb0ee85070e8d36bd2a4673d2abd10731ee73c10222cf2dd4713c"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14b5d999aefe9ffd2049ad19079f733c3aaa426190ffecadb1d5feacef8fe397"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f27aa32466993c92d326df982c4acccd9530fe354e938d9e9deada563e71ce76"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f34a765c5170c0673eb747213a0275ecc749ab3652bdbec324621ed5b2edaef"}, - {file = "greenlet-3.0.2-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:520fcb53a39ef90f5021c77606952dbbc1da75d77114d69b8d7bded4a8e1a813"}, - {file = "greenlet-3.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1fceb5351ab1601903e714c3028b37f6ea722be6873f46e349a960156c05650"}, - {file = "greenlet-3.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7363756cc439a503505b67983237d1cc19139b66488263eb19f5719a32597836"}, - {file = "greenlet-3.0.2-cp37-cp37m-win32.whl", hash = "sha256:d5547b462b8099b84746461e882a3eb8a6e3f80be46cb6afb8524eeb191d1a30"}, - {file = "greenlet-3.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:950e21562818f9c771989b5b65f990e76f4ac27af66e1bb34634ae67886ede2a"}, - {file = "greenlet-3.0.2-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:d64643317e76b4b41fdba659e7eca29634e5739b8bc394eda3a9127f697ed4b0"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f9ea7c2c9795549653b6f7569f6bc75d2c7d1f6b2854eb8ce0bc6ec3cb2dd88"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db4233358d3438369051a2f290f1311a360d25c49f255a6c5d10b5bcb3aa2b49"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ed9bf77b41798e8417657245b9f3649314218a4a17aefb02bb3992862df32495"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d0df07a38e41a10dfb62c6fc75ede196572b580f48ee49b9282c65639f3965"}, - {file = "greenlet-3.0.2-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10d247260db20887ae8857c0cbc750b9170f0b067dd7d38fb68a3f2334393bd3"}, - {file = "greenlet-3.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a37ae53cca36823597fd5f65341b6f7bac2dd69ecd6ca01334bb795460ab150b"}, - {file = "greenlet-3.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:80d068e4b6e2499847d916ef64176811ead6bf210a610859220d537d935ec6fd"}, - {file = "greenlet-3.0.2-cp38-cp38-win32.whl", hash = 
"sha256:b1405614692ac986490d10d3e1a05e9734f473750d4bee3cf7d1286ef7af7da6"}, - {file = "greenlet-3.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8756a94ed8f293450b0e91119eca2a36332deba69feb2f9ca410d35e74eae1e4"}, - {file = "greenlet-3.0.2-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2c93cd03acb1499ee4de675e1a4ed8eaaa7227f7949dc55b37182047b006a7aa"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dac09e3c0b78265d2e6d3cbac2d7c48bd1aa4b04a8ffeda3adde9f1688df2c3"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ee59c4627c8c4bb3e15949fbcd499abd6b7f4ad9e0bfcb62c65c5e2cabe0ec4"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18fe39d70d482b22f0014e84947c5aaa7211fb8e13dc4cc1c43ed2aa1db06d9a"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84bef3cfb6b6bfe258c98c519811c240dbc5b33a523a14933a252e486797c90"}, - {file = "greenlet-3.0.2-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aecea0442975741e7d69daff9b13c83caff8c13eeb17485afa65f6360a045765"}, - {file = "greenlet-3.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f260e6c2337871a52161824058923df2bbddb38bc11a5cbe71f3474d877c5bd9"}, - {file = "greenlet-3.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fc14dd9554f88c9c1fe04771589ae24db76cd56c8f1104e4381b383d6b71aff8"}, - {file = "greenlet-3.0.2-cp39-cp39-win32.whl", hash = "sha256:bfcecc984d60b20ffe30173b03bfe9ba6cb671b0be1e95c3e2056d4fe7006590"}, - {file = "greenlet-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:c235131bf59d2546bb3ebaa8d436126267392f2e51b85ff45ac60f3a26549af0"}, - {file = "greenlet-3.0.2.tar.gz", hash = "sha256:1c1129bc47266d83444c85a8e990ae22688cf05fb20d7951fd2866007c2ba9bc"}, -] - -[package.extras] -docs = ["Sphinx"] + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = 
"sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = 
"sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = 
"greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] [[package]] @@ -1912,6 +1912,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2036,39 +2046,39 @@ i18n = ["babel (>=2.9.0)"] [[package]] name = "mypy" -version = "1.7.1" +version = "1.8.0" description = "Optional static typing for Python" 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = 
"mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = 
"mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] @@ -2119,14 +2129,14 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.13.0" +version = "7.13.1" 
description = "Converting Jupyter Notebooks" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.13.0-py3-none-any.whl", hash = "sha256:22521cfcc10ba5755e44acb6a70d2bd8a891ce7aed6746481e10cd548b169e19"}, - {file = "nbconvert-7.13.0.tar.gz", hash = "sha256:c6f61c86fca5b28bd17f4f9a308248e59fa2b54919e1589f6cc3575c5dfec2bd"}, + {file = "nbconvert-7.13.1-py3-none-any.whl", hash = "sha256:3c50eb2d326478cc90b8759cf2ab9dde3d892c6537cd6a5bc0991db8ef734bcc"}, + {file = "nbconvert-7.13.1.tar.gz", hash = "sha256:2dc8267dbdfeedce2dcd34c9e3f1b51af18f43cb105549d1c5a18189ec23ba85"}, ] [package.dependencies] @@ -2857,6 +2867,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2864,8 +2875,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2882,6 +2900,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2889,6 +2908,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 45c03b4e..b1c8c88e 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -3375,6 +3375,7 @@ def test_post_dataset_rights_metadata(clients): "identifier_scheme": "Identifier Scheme", "rights": "Rights", "uri": "URI", + "license_text": "license text", } ], ) @@ -3389,6 +3390,7 @@ def test_post_dataset_rights_metadata(clients): assert response_data[0]["identifier_scheme"] == "Identifier Scheme" assert response_data[0]["rights"] == "Rights" assert response_data[0]["uri"] == "URI" + assert response_data[0]["license_text"] == "license text" admin_response = _admin_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", @@ -3398,6 +3400,7 @@ def test_post_dataset_rights_metadata(clients): "identifier_scheme": "Admin Identifier Scheme", "rights": "Admin Rights", "uri": "Admin URI", + "license_text": "license text", } ], ) @@ -3412,6 +3415,7 @@ def test_post_dataset_rights_metadata(clients): assert admin_response_data[0]["identifier_scheme"] == "Admin Identifier Scheme" assert admin_response_data[0]["rights"] == "Admin Rights" assert admin_response_data[0]["uri"] == "Admin URI" + assert admin_response_data[0]["license_text"] == "license text" editor_response = _editor_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", @@ -3421,6 +3425,7 @@ def test_post_dataset_rights_metadata(clients): "identifier_scheme": "Editor Identifier Scheme", "rights": "Editor Rights", "uri": "Editor URI", + "license_text": "license text", } ], ) @@ -3433,6 +3438,7 @@ def test_post_dataset_rights_metadata(clients): assert editor_response_data[0]["identifier_scheme"] == "Editor 
Identifier Scheme" assert editor_response_data[0]["rights"] == "Editor Rights" assert editor_response_data[0]["uri"] == "Editor URI" + assert editor_response_data[0]["license_text"] == "license text" viewer_response = _viewer_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", @@ -3442,6 +3448,7 @@ def test_post_dataset_rights_metadata(clients): "identifier_scheme": "Viewer Identifier Scheme", "rights": "Viewer Rights", "uri": "Viewer URI", + "license_text": "license text", } ], ) @@ -3488,53 +3495,65 @@ def test_get_dataset_rights_metadata(clients): assert response_data[0]["identifier_scheme"] == "Identifier Scheme" assert response_data[0]["rights"] == "Rights" assert response_data[0]["uri"] == "URI" + assert response_data[0]["license_text"] == "license text" assert response_data[1]["identifier"] == "Admin Identifier" assert response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" assert response_data[1]["rights"] == "Admin Rights" assert response_data[1]["uri"] == "Admin URI" + assert response_data[1]["license_text"] == "license text" assert response_data[2]["identifier"] == "Editor Identifier" assert response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" assert response_data[2]["rights"] == "Editor Rights" assert response_data[2]["uri"] == "Editor URI" + assert response_data[2]["license_text"] == "license text" assert admin_response_data[0]["identifier"] == "Identifier" assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" assert admin_response_data[0]["rights"] == "Rights" assert admin_response_data[0]["uri"] == "URI" + assert admin_response_data[0]["license_text"] == "license text" assert admin_response_data[1]["identifier"] == "Admin Identifier" assert admin_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" assert admin_response_data[1]["rights"] == "Admin Rights" assert admin_response_data[1]["uri"] == "Admin URI" + assert admin_response_data[1]["license_text"] == "license text" 
assert admin_response_data[2]["identifier"] == "Editor Identifier" assert admin_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" assert admin_response_data[2]["rights"] == "Editor Rights" assert admin_response_data[2]["uri"] == "Editor URI" + assert admin_response_data[2]["license_text"] == "license text" assert editor_response_data[0]["identifier"] == "Identifier" assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" assert editor_response_data[0]["rights"] == "Rights" assert editor_response_data[0]["uri"] == "URI" + assert editor_response_data[0]["license_text"] == "license text" assert editor_response_data[1]["identifier"] == "Admin Identifier" assert editor_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" assert editor_response_data[1]["rights"] == "Admin Rights" assert editor_response_data[1]["uri"] == "Admin URI" + assert editor_response_data[1]["license_text"] == "license text" assert editor_response_data[2]["identifier"] == "Editor Identifier" assert editor_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" assert editor_response_data[2]["rights"] == "Editor Rights" assert editor_response_data[2]["uri"] == "Editor URI" + assert editor_response_data[2]["license_text"] == "license text" assert viewer_response_data[0]["identifier"] == "Identifier" assert viewer_response_data[0]["identifier_scheme"] == "Identifier Scheme" assert viewer_response_data[0]["rights"] == "Rights" assert viewer_response_data[0]["uri"] == "URI" + assert viewer_response_data[0]["license_text"] == "license text" assert viewer_response_data[1]["identifier"] == "Admin Identifier" assert viewer_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" assert viewer_response_data[1]["rights"] == "Admin Rights" assert viewer_response_data[1]["uri"] == "Admin URI" + assert viewer_response_data[1]["license_text"] == "license text" assert viewer_response_data[2]["identifier"] == "Editor Identifier" assert 
viewer_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" assert viewer_response_data[2]["rights"] == "Editor Rights" assert viewer_response_data[2]["uri"] == "Editor URI" + assert viewer_response_data[2]["license_text"] == "license text" def test_delete_dataset_rights_metadata(clients): diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index e4ac4ce0..20c1e265 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -669,6 +669,7 @@ def test_get_version_dataset_metadata(clients): "identifier_scheme": "Identifier Scheme", "rights": "Rights", "uri": "URI", + "license_text": "license text", } ], ) From ff04d071c651bff1b15474c262ac6dc5287fe20e Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Thu, 28 Dec 2023 11:47:43 -0800 Subject: [PATCH 391/505] feat: added study metadata keywords (#37) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added testing license_text dataset metadata * style: 🎨 fix code style issues with Black * feat: added testing for study keywords * feat: added study keywords endpoint * fix: db is getting cleared in test env * fix: format * fix: removed comments --------- Co-authored-by: Lint Action --- apis/study_metadata/study_other.py | 48 ++++++++- model/study.py | 1 + model/study_metadata/study_other.py | 13 +-- pyproject.toml | 9 +- tests/functional/test_study_metadata_api.py | 110 ++++++++++++++++++++ tests/functional/test_study_version_api.py | 5 +- 6 files changed, 173 insertions(+), 13 deletions(-) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index d9ecd765..a96fb386 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -65,9 +65,8 @@ def put(self, study_id: int): return study_oversight, 200 -# todo: rename class @api.route("/study//metadata/conditions") 
-class StudyConditionsResource(Resource): +class StudyCondition(Resource): """Study Conditions Metadata""" @api.doc("conditions") @@ -109,3 +108,48 @@ def put(self, study_id: int): model.db.session.commit() return study_obj.study_other.conditions, 200 + + +@api.route("/study//metadata/keywords") +class StudyKeywords(Resource): + """Study Keywords Metadata""" + + @api.doc("keywords") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(study_other) + def get(self, study_id: int): + """Get study keywords metadata""" + study_ = model.Study.query.get(study_id) + + study_other_keywords = study_.study_other.keywords + + return study_other_keywords, 200 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int): + """Update study keywords metadata""" + # Schema validation + schema = { + "type": "array", + "items": {"type": "string", "minLength": 1}, + "minItems": 1, + "uniqueItems": True, + "additionalItems": False, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[dict, typing.Any] = request.json + study_obj = model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not modify study", 403 + study_obj.study_other.keywords = data + study_obj.touch() + model.db.session.commit() + + return study_obj.study_other.keywords, 200 diff --git a/model/study.py b/model/study.py index 667b6f6f..0c366e7e 100644 --- a/model/study.py +++ b/model/study.py @@ -217,6 +217,7 @@ def to_dict_study_metadata(self): "status": self.study_status.to_dict_metadata(), "oversight": self.study_other.oversight_has_dmc, "conditions": self.study_other.conditions, + "keywords": self.study_other.keywords, } @staticmethod diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 237d5b98..1d7cd8e3 100644 --- a/model/study_metadata/study_other.py +++ 
b/model/study_metadata/study_other.py @@ -41,12 +41,13 @@ def to_dict(self): "size": self.size, } - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "oversight_has_dmc": self.oversight_has_dmc, - "conditions": self.conditions, - } + # def to_dict_metadata(self): + # """Converts the study metadata to a dictionary""" + # return { + # "oversight_has_dmc": self.oversight_has_dmc, + # "conditions": self.conditions, + # "keywords": self.keywords + # } @staticmethod def from_data(study: Study, data: dict): diff --git a/pyproject.toml b/pyproject.toml index f47580ca..7f9a483a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -135,8 +135,13 @@ lint = ["flake8", "typecheck", "pylint"] precommit = ["format", "flake8", "typecheck", "pylint"] -test = "pytest -rx -W ignore::DeprecationWarning" -test_with_capture = "pytest -s -W ignore::DeprecationWarning" +pytest = "pytest -rx -W ignore::DeprecationWarning" +pytest_with_capture = "pytest -s -W ignore::DeprecationWarning" + +destroy-schema = "flask destroy-schema" + +test = ["pytest", "destroy-schema"] +test_with_capture = ["pytest_with_capture", "destroy-schema"] jupyter = "jupyter notebook" diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index 8cde58ac..848b13f0 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -827,6 +827,116 @@ def test_get_conditions_metadata(clients): assert viewer_response_data[3] == "editor-size string" +# ------------------- KEYWORDS METADATA ------------------- # +def test_put_keywords_metadata(clients): + """ + GIVEN a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (POST) + THEN check that the response is valid and creates the keywords metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = 
pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.put( + f"/study/{study_id}/metadata/keywords", + json=[ + "true", + "conditions string", + "keywords string", + "size string", + ], + ) + + assert response.status_code == 200 + response_data = json.loads(response.data) + + assert response_data[0] == "true" + assert response_data[1] == "conditions string" + assert response_data[2] == "keywords string" + assert response_data[3] == "size string" + + admin_response = _admin_client.put( + f"/study/{study_id}/metadata/keywords", + json=[ + "true", + "admin-conditions string", + "admin-keywords string", + "admin-size string", + ], + ) + + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + + assert admin_response_data[0] == "true" + assert admin_response_data[1] == "admin-conditions string" + assert admin_response_data[2] == "admin-keywords string" + assert admin_response_data[3] == "admin-size string" + + editor_response = _editor_client.put( + f"/study/{study_id}/metadata/keywords", + json=[ + "true", + "editor-conditions string", + "editor-keywords string", + "editor-size string", + ], + ) + + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + + assert editor_response_data[0] == "true" + assert editor_response_data[1] == "editor-conditions string" + assert editor_response_data[2] == "editor-keywords string" + assert editor_response_data[3] == "editor-size string" + + +def test_get_keywords_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (GET) + THEN check that the response is valid and retrieves the keywords metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.get(f"/study/{study_id}/metadata/keywords") + 
admin_response = _admin_client.get(f"/study/{study_id}/metadata/keywords") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/keywords") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/keywords") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data[0] == "true" + assert response_data[1] == "editor-conditions string" + assert response_data[2] == "editor-keywords string" + assert response_data[3] == "editor-size string" + + assert admin_response_data[0] == "true" + assert admin_response_data[1] == "editor-conditions string" + assert admin_response_data[2] == "editor-keywords string" + assert admin_response_data[3] == "editor-size string" + + assert editor_response_data[0] == "true" + assert editor_response_data[1] == "editor-conditions string" + assert editor_response_data[2] == "editor-keywords string" + assert editor_response_data[3] == "editor-size string" + + assert viewer_response_data[0] == "true" + assert viewer_response_data[1] == "editor-conditions string" + assert viewer_response_data[2] == "editor-keywords string" + assert viewer_response_data[3] == "editor-size string" + + # ------------------- DESCRIPTION METADATA ------------------- # def test_put_description_metadata(clients): """ diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index 20c1e265..874c18dc 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -30,7 +30,6 @@ def test_post_dataset_version(clients): assert response.status_code == 201 response_data = json.loads(response.data) 
pytest.global_dataset_version_id = response_data["id"] - assert response_data["title"] == "Dataset Version 1.0" assert response_data["published"] is False assert response_data["doi"] == "doi:test" @@ -145,7 +144,7 @@ def test_put_dataset_version(clients): Then check that the response is valid and updates the dataset version """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] + study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id version_id = pytest.global_dataset_version_id @@ -1187,7 +1186,7 @@ def test_delete_dataset_version(clients): Then check that the response is valid and deletes the dataset version """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] + study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id version_id = pytest.global_dataset_version_id From 60c954b8dee84d12b6693b86d24df0bf2ae3ecd8 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 28 Dec 2023 12:27:14 -0800 Subject: [PATCH 392/505] =?UTF-8?q?=F0=9F=9A=A8=20fix:=20fix=20mypy=20warn?= =?UTF-8?q?ing?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/utils.py | 2 +- core/utils.py | 7 +------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/apis/utils.py b/apis/utils.py index d797b2b8..c5a164c3 100644 --- a/apis/utils.py +++ b/apis/utils.py @@ -1,6 +1,6 @@ """Utils Endpoints""" from flask import request -from flask_restx import Resource, Namespace +from flask_restx import Namespace, Resource from core.utils import request_json diff --git a/core/utils.py b/core/utils.py index cde0b34b..c847973a 100644 --- a/core/utils.py +++ b/core/utils.py @@ -5,12 +5,7 @@ def request_json(url): """ "Request JSON from URL""" try: - payload = {} - headers = {} - - response = requests.request( - "GET", url, headers=headers, 
data=payload, timeout=10 - ) + response = requests.request("GET", url, headers={}, data={}, timeout=10) return response.json() except Exception as e: From 7a5a63b4f103b6c4e18cc0212c228645af6b5ec9 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Mon, 15 Jan 2024 10:09:33 -0800 Subject: [PATCH 393/505] feat: DOI reserved for version table (#39) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added AI identifier column to version * fix: modified test for DOI column * fix: format * fix: alembic version * style: 🎨 fix code style issues with Black * fix: identifier name * fix: unique added to doi column * style: 🎨 fix code style issues with Black * fix: doi nullability --------- Co-authored-by: Lint Action --- .../0defbfc71c59_add_identifier_to_version.py | 29 +++++++++++++++++ apis/dataset.py | 5 ++- model/version.py | 16 +++++++--- tests/functional/test_study_version_api.py | 32 +++++++++---------- 4 files changed, 60 insertions(+), 22 deletions(-) create mode 100644 alembic/versions/0defbfc71c59_add_identifier_to_version.py diff --git a/alembic/versions/0defbfc71c59_add_identifier_to_version.py b/alembic/versions/0defbfc71c59_add_identifier_to_version.py new file mode 100644 index 00000000..5dbdd7d2 --- /dev/null +++ b/alembic/versions/0defbfc71c59_add_identifier_to_version.py @@ -0,0 +1,29 @@ +"""add identifier to version + +Revision ID: 0defbfc71c59 +Revises: 29e42ce4be3f +Create Date: 2024-01-05 13:25:15.547450 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "0defbfc71c59" +down_revision: Union[str, None] = "29e42ce4be3f" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + """ALTER TABLE version + ADD COLUMN identifier SERIAL + """ + ) + op.execute("UPDATE version SET identifier = 1") + op.create_unique_constraint("unique_identifier", "version", ["identifier"]) + op.create_unique_constraint("unique_doi", "version", ["doi"]) diff --git a/apis/dataset.py b/apis/dataset.py index 0c32921c..c1b0ac09 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -17,10 +17,11 @@ "title": fields.String(required=True), "changelog": fields.String(required=True), "created_at": fields.String(required=True), - "doi": fields.String(required=True), + "doi": fields.String(required=False), "published": fields.Boolean(required=True), "participants": fields.List(fields.String, required=True), "published_on": fields.String(required=True), + "identifier": fields.Integer(required=True), }, ) @@ -192,6 +193,8 @@ def post(self, study_id: int, dataset_id: int): dataset_versions = model.Version.from_data(data_obj, data) model.db.session.add(dataset_versions) model.db.session.commit() + dataset_versions.doi = f"10.fairhub/{dataset_versions.identifier}" + model.db.session.commit() return dataset_versions.to_dict(), 201 diff --git a/model/version.py b/model/version.py index 3aa3bb5c..f1d34245 100644 --- a/model/version.py +++ b/model/version.py @@ -1,8 +1,7 @@ import datetime import uuid from datetime import timezone - -from sqlalchemy import Table +from sqlalchemy import Table, Sequence import model from model.dataset import Dataset @@ -31,10 +30,19 @@ def __init__(self, dataset): published = db.Column(db.BOOLEAN, nullable=False) changelog = db.Column(db.String, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) - doi = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) published_on 
= db.Column(db.BigInteger, nullable=False) + identifier = db.Column( + db.Integer, + Sequence("version_identifier_seq"), + nullable=False, + unique=True, + server_default=Sequence("version_identifier_seq").next_value(), + ) + + doi = db.Column(db.String, nullable=True, unique=True) + version_readme = db.relationship( "VersionReadme", uselist=False, @@ -57,6 +65,7 @@ def to_dict(self): "published_on": self.published_on, "updated_on": self.updated_on, "created_at": self.created_at, + "identifier": self.identifier, "doi": self.doi, "published": self.published, "readme": self.version_readme.content if self.version_readme else "" @@ -76,7 +85,6 @@ def from_data(dataset: Dataset, data: dict): def update(self, data: dict): self.title = data["title"] self.published = data["published"] if "published" in data else False - self.doi = data["doi"] if "doi" in data else "" self.published_on = datetime.datetime.now(timezone.utc).timestamp() self.updated_on = datetime.datetime.now(timezone.utc).timestamp() # self.participants[:] = data["participants"] diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index 874c18dc..fea3398a 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -22,17 +22,15 @@ def test_post_dataset_version(clients): json={ "title": "Dataset Version 1.0", "published": False, - "doi": "doi:test", "changelog": "changelog testing here", }, ) - assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_version_id = response_data["id"] assert response_data["title"] == "Dataset Version 1.0" assert response_data["published"] is False - assert response_data["doi"] == "doi:test" + assert response_data["doi"] == f"10.fairhub/{response_data['identifier']}" assert response_data["changelog"] == "changelog testing here" @@ -70,20 +68,23 @@ def test_get_all_dataset_versions(clients): assert len(response_data) == 1 assert 
len(admin_response_data) == 1 assert len(editor_response_data) == 1 - assert response_data[0]["title"] == "Dataset Version 1.0" assert response_data[0]["published"] is False - assert response_data[0]["doi"] == "doi:test" assert response_data[0]["changelog"] == "changelog testing here" + assert response_data[0]["doi"] == f"10.fairhub/{response_data[0]['identifier']}" assert admin_response_data[0]["title"] == "Dataset Version 1.0" assert admin_response_data[0]["published"] is False - assert admin_response_data[0]["doi"] == "doi:test" + assert ( + admin_response_data[0]["doi"] == f"10.fairhub/{response_data[0]['identifier']}" + ) assert admin_response_data[0]["changelog"] == "changelog testing here" assert editor_response_data[0]["title"] == "Dataset Version 1.0" assert editor_response_data[0]["published"] is False - assert editor_response_data[0]["doi"] == "doi:test" + assert ( + editor_response_data[0]["doi"] == f"10.fairhub/{response_data[0]['identifier']}" + ) assert editor_response_data[0]["changelog"] == "changelog testing here" @@ -122,17 +123,17 @@ def test_get_dataset_version(clients): assert response_data["title"] == "Dataset Version 1.0" assert response_data["published"] is False - assert response_data["doi"] == "doi:test" assert response_data["changelog"] == "changelog testing here" + assert response_data["doi"] == f"10.fairhub/{response_data['identifier']}" assert admin_response_data["title"] == "Dataset Version 1.0" assert admin_response_data["published"] is False - assert admin_response_data["doi"] == "doi:test" + assert admin_response_data["doi"] == f"10.fairhub/{response_data['identifier']}" assert admin_response_data["changelog"] == "changelog testing here" assert editor_response_data["title"] == "Dataset Version 1.0" assert editor_response_data["published"] is False - assert editor_response_data["doi"] == "doi:test" + assert editor_response_data["doi"] == f"10.fairhub/{response_data['identifier']}" assert editor_response_data["changelog"] == 
"changelog testing here" @@ -154,19 +155,18 @@ def test_put_dataset_version(clients): "title": "Dataset Version 2.0", "changelog": "Updating the changelog", "published": False, - "doi": "doi:test123", "readme": "readme testing here", }, ) assert response.status_code == 200 response_data = json.loads(response.data) - print(response_data) assert response_data["title"] == "Dataset Version 2.0" assert response_data["changelog"] == "Updating the changelog" - assert response_data["doi"] == "doi:test123" + assert response_data["doi"] == f"10.fairhub/{response_data['identifier']}" assert response_data["readme"] == "" + assert response_data["published"] is False admin_response = _admin_client.put( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", @@ -174,7 +174,6 @@ def test_put_dataset_version(clients): "title": "Dataset Version 3.0", "changelog": "Changelog modified by admin", "published": False, - "doi": "doi:test", "readme": "readme modified by editor", }, ) @@ -185,7 +184,8 @@ def test_put_dataset_version(clients): assert admin_response_data["title"] == "Dataset Version 3.0" assert admin_response_data["changelog"] == "Changelog modified by admin" assert admin_response_data["published"] is False - assert admin_response_data["doi"] == "doi:test" + assert admin_response_data["doi"] == f"10.fairhub/{response_data['identifier']}" + assert admin_response_data["readme"] == "" editor_response = _editor_client.put( @@ -194,7 +194,6 @@ def test_put_dataset_version(clients): "title": "Dataset Version 4.0", "changelog": "Changelog modified by editor", "published": False, - "doi": "doi:test", "readme": "readme modified by editor", }, ) @@ -207,7 +206,6 @@ def test_put_dataset_version(clients): "title": "Dataset Version 5.0", "changelog": "Changelog modified by viewer", "published": False, - "doi": "test:doi", "readme": "readme modified by viewer", }, ) From 14353980eaa4d9b12305833dd49a28f12e9674e4 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 15 Jan 
2024 12:07:13 -0800 Subject: [PATCH 394/505] Revert "feat: DOI reserved for version table" (#40) --- .../0defbfc71c59_add_identifier_to_version.py | 29 ----------------- apis/dataset.py | 5 +-- model/version.py | 16 +++------- tests/functional/test_study_version_api.py | 32 ++++++++++--------- 4 files changed, 22 insertions(+), 60 deletions(-) delete mode 100644 alembic/versions/0defbfc71c59_add_identifier_to_version.py diff --git a/alembic/versions/0defbfc71c59_add_identifier_to_version.py b/alembic/versions/0defbfc71c59_add_identifier_to_version.py deleted file mode 100644 index 5dbdd7d2..00000000 --- a/alembic/versions/0defbfc71c59_add_identifier_to_version.py +++ /dev/null @@ -1,29 +0,0 @@ -"""add identifier to version - -Revision ID: 0defbfc71c59 -Revises: 29e42ce4be3f -Create Date: 2024-01-05 13:25:15.547450 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = "0defbfc71c59" -down_revision: Union[str, None] = "29e42ce4be3f" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - op.execute( - """ALTER TABLE version - ADD COLUMN identifier SERIAL - """ - ) - op.execute("UPDATE version SET identifier = 1") - op.create_unique_constraint("unique_identifier", "version", ["identifier"]) - op.create_unique_constraint("unique_doi", "version", ["doi"]) diff --git a/apis/dataset.py b/apis/dataset.py index c1b0ac09..0c32921c 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -17,11 +17,10 @@ "title": fields.String(required=True), "changelog": fields.String(required=True), "created_at": fields.String(required=True), - "doi": fields.String(required=False), + "doi": fields.String(required=True), "published": fields.Boolean(required=True), "participants": fields.List(fields.String, required=True), "published_on": fields.String(required=True), - "identifier": fields.Integer(required=True), 
}, ) @@ -193,8 +192,6 @@ def post(self, study_id: int, dataset_id: int): dataset_versions = model.Version.from_data(data_obj, data) model.db.session.add(dataset_versions) model.db.session.commit() - dataset_versions.doi = f"10.fairhub/{dataset_versions.identifier}" - model.db.session.commit() return dataset_versions.to_dict(), 201 diff --git a/model/version.py b/model/version.py index f1d34245..3aa3bb5c 100644 --- a/model/version.py +++ b/model/version.py @@ -1,7 +1,8 @@ import datetime import uuid from datetime import timezone -from sqlalchemy import Table, Sequence + +from sqlalchemy import Table import model from model.dataset import Dataset @@ -30,19 +31,10 @@ def __init__(self, dataset): published = db.Column(db.BOOLEAN, nullable=False) changelog = db.Column(db.String, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) + doi = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) published_on = db.Column(db.BigInteger, nullable=False) - identifier = db.Column( - db.Integer, - Sequence("version_identifier_seq"), - nullable=False, - unique=True, - server_default=Sequence("version_identifier_seq").next_value(), - ) - - doi = db.Column(db.String, nullable=True, unique=True) - version_readme = db.relationship( "VersionReadme", uselist=False, @@ -65,7 +57,6 @@ def to_dict(self): "published_on": self.published_on, "updated_on": self.updated_on, "created_at": self.created_at, - "identifier": self.identifier, "doi": self.doi, "published": self.published, "readme": self.version_readme.content if self.version_readme else "" @@ -85,6 +76,7 @@ def from_data(dataset: Dataset, data: dict): def update(self, data: dict): self.title = data["title"] self.published = data["published"] if "published" in data else False + self.doi = data["doi"] if "doi" in data else "" self.published_on = datetime.datetime.now(timezone.utc).timestamp() self.updated_on = datetime.datetime.now(timezone.utc).timestamp() # self.participants[:] = 
data["participants"] diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index fea3398a..874c18dc 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -22,15 +22,17 @@ def test_post_dataset_version(clients): json={ "title": "Dataset Version 1.0", "published": False, + "doi": "doi:test", "changelog": "changelog testing here", }, ) + assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_version_id = response_data["id"] assert response_data["title"] == "Dataset Version 1.0" assert response_data["published"] is False - assert response_data["doi"] == f"10.fairhub/{response_data['identifier']}" + assert response_data["doi"] == "doi:test" assert response_data["changelog"] == "changelog testing here" @@ -68,23 +70,20 @@ def test_get_all_dataset_versions(clients): assert len(response_data) == 1 assert len(admin_response_data) == 1 assert len(editor_response_data) == 1 + assert response_data[0]["title"] == "Dataset Version 1.0" assert response_data[0]["published"] is False + assert response_data[0]["doi"] == "doi:test" assert response_data[0]["changelog"] == "changelog testing here" - assert response_data[0]["doi"] == f"10.fairhub/{response_data[0]['identifier']}" assert admin_response_data[0]["title"] == "Dataset Version 1.0" assert admin_response_data[0]["published"] is False - assert ( - admin_response_data[0]["doi"] == f"10.fairhub/{response_data[0]['identifier']}" - ) + assert admin_response_data[0]["doi"] == "doi:test" assert admin_response_data[0]["changelog"] == "changelog testing here" assert editor_response_data[0]["title"] == "Dataset Version 1.0" assert editor_response_data[0]["published"] is False - assert ( - editor_response_data[0]["doi"] == f"10.fairhub/{response_data[0]['identifier']}" - ) + assert editor_response_data[0]["doi"] == "doi:test" assert editor_response_data[0]["changelog"] == "changelog testing 
here" @@ -123,17 +122,17 @@ def test_get_dataset_version(clients): assert response_data["title"] == "Dataset Version 1.0" assert response_data["published"] is False + assert response_data["doi"] == "doi:test" assert response_data["changelog"] == "changelog testing here" - assert response_data["doi"] == f"10.fairhub/{response_data['identifier']}" assert admin_response_data["title"] == "Dataset Version 1.0" assert admin_response_data["published"] is False - assert admin_response_data["doi"] == f"10.fairhub/{response_data['identifier']}" + assert admin_response_data["doi"] == "doi:test" assert admin_response_data["changelog"] == "changelog testing here" assert editor_response_data["title"] == "Dataset Version 1.0" assert editor_response_data["published"] is False - assert editor_response_data["doi"] == f"10.fairhub/{response_data['identifier']}" + assert editor_response_data["doi"] == "doi:test" assert editor_response_data["changelog"] == "changelog testing here" @@ -155,18 +154,19 @@ def test_put_dataset_version(clients): "title": "Dataset Version 2.0", "changelog": "Updating the changelog", "published": False, + "doi": "doi:test123", "readme": "readme testing here", }, ) assert response.status_code == 200 response_data = json.loads(response.data) + print(response_data) assert response_data["title"] == "Dataset Version 2.0" assert response_data["changelog"] == "Updating the changelog" - assert response_data["doi"] == f"10.fairhub/{response_data['identifier']}" + assert response_data["doi"] == "doi:test123" assert response_data["readme"] == "" - assert response_data["published"] is False admin_response = _admin_client.put( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", @@ -174,6 +174,7 @@ def test_put_dataset_version(clients): "title": "Dataset Version 3.0", "changelog": "Changelog modified by admin", "published": False, + "doi": "doi:test", "readme": "readme modified by editor", }, ) @@ -184,8 +185,7 @@ def test_put_dataset_version(clients): assert 
admin_response_data["title"] == "Dataset Version 3.0" assert admin_response_data["changelog"] == "Changelog modified by admin" assert admin_response_data["published"] is False - assert admin_response_data["doi"] == f"10.fairhub/{response_data['identifier']}" - + assert admin_response_data["doi"] == "doi:test" assert admin_response_data["readme"] == "" editor_response = _editor_client.put( @@ -194,6 +194,7 @@ def test_put_dataset_version(clients): "title": "Dataset Version 4.0", "changelog": "Changelog modified by editor", "published": False, + "doi": "doi:test", "readme": "readme modified by editor", }, ) @@ -206,6 +207,7 @@ def test_put_dataset_version(clients): "title": "Dataset Version 5.0", "changelog": "Changelog modified by viewer", "published": False, + "doi": "test:doi", "readme": "readme modified by viewer", }, ) From 992d6fd1fbf6a7975bc53f61158d7255536968e6 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 18 Jan 2024 10:10:21 -0800 Subject: [PATCH 395/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20rename=20container?= =?UTF-8?q?=20image?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../workflows/build-and-deploy-to-main.yml | 24 +++---- .../workflows/build-and-deploy-to-staging.yml | 30 ++++----- .github/workflows/deploy-app-to-main-slot.yml | 67 ------------------- apis/file.py | 2 + 4 files changed, 29 insertions(+), 94 deletions(-) delete mode 100644 .github/workflows/deploy-app-to-main-slot.yml diff --git a/.github/workflows/build-and-deploy-to-main.yml b/.github/workflows/build-and-deploy-to-main.yml index 1f15603d..e41fff74 100644 --- a/.github/workflows/build-and-deploy-to-main.yml +++ b/.github/workflows/build-and-deploy-to-main.yml @@ -1,13 +1,13 @@ # Will be deployed to https://api.fairhub.io -name: (main) Build and push api image to Azure Container Registry +name: (main) Build and push api image to Azure Container Registry on: - pull_request: - types: [closed] - branches: - - main - 
workflow_dispatch: + pull_request: + types: [closed] + branches: + - main + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -22,7 +22,7 @@ jobs: AZURE_REGISTRY_LOGIN_SERVER: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} AZURE_REGISTRY_USERNAME: ${{ secrets.AZURE_REGISTRY_USERNAME }} AZURE_REGISTRY_PASSWORD: ${{ secrets.AZURE_REGISTRY_PASSWORD }} - + steps: - name: Checkout uses: actions/checkout@v2 @@ -32,21 +32,21 @@ jobs: with: ref: main repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Run formatter' - + check-name: "Run formatter" + - name: Wait for linting to pass uses: lewagon/wait-on-check-action@v1.3.1 with: ref: main repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Run linters (3.10)' + check-name: "Run linters (3.10)" - name: Wait for tests to pass uses: lewagon/wait-on-check-action@v1.3.1 with: ref: main repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Run tests (3.10)' + check-name: "Run tests (3.10)" - name: Login to Azure Container Registry uses: azure/docker-login@v1 @@ -65,4 +65,4 @@ jobs: context: . 
file: ./Dockerfile push: true - tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:latest,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:${{ steps.git_sha.outputs.sha }} \ No newline at end of file + tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub-api:latest,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub-api:${{ steps.git_sha.outputs.sha }} diff --git a/.github/workflows/build-and-deploy-to-staging.yml b/.github/workflows/build-and-deploy-to-staging.yml index a1a32b2f..5cd20ba8 100644 --- a/.github/workflows/build-and-deploy-to-staging.yml +++ b/.github/workflows/build-and-deploy-to-staging.yml @@ -1,16 +1,16 @@ # Deployed to https://staging.api.fairhub.io -name: (staging) Build and push api image to Azure Container Registry +name: (staging) Build and push api image to Azure Container Registry on: - push: - branches: - - staging - pull_request: - types: [closed] - branches: - - staging - workflow_dispatch: + push: + branches: + - staging + pull_request: + types: [closed] + branches: + - staging + workflow_dispatch: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} @@ -25,7 +25,7 @@ jobs: AZURE_REGISTRY_LOGIN_SERVER: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }} AZURE_REGISTRY_USERNAME: ${{ secrets.AZURE_REGISTRY_USERNAME }} AZURE_REGISTRY_PASSWORD: ${{ secrets.AZURE_REGISTRY_PASSWORD }} - + steps: - name: Checkout uses: actions/checkout@v2 @@ -35,21 +35,21 @@ jobs: with: ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Run formatter' - + check-name: "Run formatter" + - name: Wait for linting to pass uses: lewagon/wait-on-check-action@v1.3.1 with: ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Run linters (3.10)' + check-name: "Run linters (3.10)" - name: Wait for tests to pass uses: lewagon/wait-on-check-action@v1.3.1 with: ref: staging repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: 'Run tests (3.10)' + check-name: "Run tests (3.10)" - name: Login to 
Azure Container Registry uses: azure/docker-login@v1 @@ -68,4 +68,4 @@ jobs: context: . file: ./Dockerfile push: true - tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:staging,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub:${{ steps.git_sha.outputs.sha }} \ No newline at end of file + tags: ${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub-api:staging,${{ secrets.AZURE_REGISTRY_LOGIN_SERVER }}/fairhub-api:${{ steps.git_sha.outputs.sha }} diff --git a/.github/workflows/deploy-app-to-main-slot.yml b/.github/workflows/deploy-app-to-main-slot.yml deleted file mode 100644 index 1ae12f5c..00000000 --- a/.github/workflows/deploy-app-to-main-slot.yml +++ /dev/null @@ -1,67 +0,0 @@ -# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy -# More GitHub Actions for Azure: https://github.com/Azure/actions -# More info on Python, GitHub Actions, and Azure App Service: https://aka.ms/python-webapps-actions - -name: Build and deploy Python app to Azure Web App - api-fairhub-io - -on: - push: - branches: - - main - pull_request: - types: [opened, synchronize, reopened, closed] - branches: - - main - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python version - uses: actions/setup-python@v1 - with: - python-version: "3.8" - - - name: Create and start virtual environment - run: | - python -m venv venv - source venv/bin/activate - - - name: Install dependencies - run: pip install poetry==1.3.2 && poetry install - - # Optional: Add step to run tests here (PyTest, Django test suites, etc.) - - - name: Upload artifact for deployment jobs - uses: actions/upload-artifact@v2 - with: - name: python-app - path: | - . 
- !venv/ - - deploy: - runs-on: ubuntu-latest - needs: build - environment: - name: "main" - url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} - - steps: - - name: Download artifact from build job - uses: actions/download-artifact@v2 - with: - name: python-app - path: . - - - name: "Deploy to Azure Web App" - uses: azure/webapps-deploy@v2 - id: deploy-to-webapp - with: - app-name: "api-fairhub-io" - slot-name: "main" - publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_6C6B1227CD464BFDBC9173E57CDEEC65 }} diff --git a/apis/file.py b/apis/file.py index 27434bcd..4e8fc4e4 100644 --- a/apis/file.py +++ b/apis/file.py @@ -81,6 +81,8 @@ def get(self, study_id): # pylint: disable=unused-argument response_json = response.json() + print(response_json) + paths = [] for file in response_json["paths"]: From 0f9e18c9db896c99a557647e11647d7b37570443 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 18 Jan 2024 11:21:27 -0800 Subject: [PATCH 396/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20update=20domains?= =?UTF-8?q?=20to=20app?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 2 +- app.py | 2 ++ pyproject.toml | 4 ++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index b2c62df3..cf8ed8ed 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# api.fairhub.io +# fairhub-api ## Getting started diff --git a/app.py b/app.py index e3402b59..b26071d7 100644 --- a/app.py +++ b/app.py @@ -66,6 +66,8 @@ def create_app(config_module=None): cors_origins = [ "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string + "https://staging.app.fairhub.io", + "https://app.fairhub.io", "https://staging.fairhub.io", "https://fairhub.io", ] diff --git a/pyproject.toml b/pyproject.toml index 7f9a483a..4ad5329f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,8 @@ [tool.poetry] -name 
= "api-fairhub-io" +name = "fairhub-api" version = "0.1.0" -description = "API for fairhub.io" +description = "API for fairhub" license = "MIT" authors = ["FAIR Data Innovations Hub "] From c0aa648bb3661eca78ae2710d86f782343030922 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 18 Jan 2024 12:19:16 -0800 Subject: [PATCH 397/505] =?UTF-8?q?=F0=9F=9A=91=20fix:=20disable=20email?= =?UTF-8?q?=20deliverability=20check?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apis/authentication.py b/apis/authentication.py index bdaf33b0..48eefe53 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -173,7 +173,7 @@ def validate_is_valid_email(instance): email_address = instance try: - validate_email(email_address) + validate_email(email_address, check_deliverability=False) return True except EmailNotValidError as e: raise ValidationError("Invalid email address format") from e From 76d311275d9a796aa47fc8185ad74c084afee55c Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 19 Jan 2024 14:34:07 -0800 Subject: [PATCH 398/505] feat: DOI reserved for version table (#41) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added AI identifier column to version * fix: modified test for DOI column * fix: format * fix: alembic version * style: 🎨 fix code style issues with Black * fix: identifier name * fix: unique added to doi column * style: 🎨 fix code style issues with Black * fix: doi nullability * fix: doi nullability * fix: doi integrity error * style:format * fix: doi format * style: 🎨 fix code style issues with Black --------- Co-authored-by: aydawka Co-authored-by: Lint Action --- .../0defbfc71c59_add_identifier_to_version.py | 32 ++++++++++++++ apis/dataset.py | 5 ++- model/version.py | 15 +++++-- tests/functional/test_study_version_api.py | 42 +++++++++++-------- 
4 files changed, 73 insertions(+), 21 deletions(-) create mode 100644 alembic/versions/0defbfc71c59_add_identifier_to_version.py diff --git a/alembic/versions/0defbfc71c59_add_identifier_to_version.py b/alembic/versions/0defbfc71c59_add_identifier_to_version.py new file mode 100644 index 00000000..8adc24e0 --- /dev/null +++ b/alembic/versions/0defbfc71c59_add_identifier_to_version.py @@ -0,0 +1,32 @@ +"""add identifier to version + +Revision ID: 0defbfc71c59 +Revises: 29e42ce4be3f +Create Date: 2024-01-05 13:25:15.547450 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "0defbfc71c59" +down_revision: Union[str, None] = "29e42ce4be3f" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("version") as batch_op: + batch_op.alter_column("doi", nullable=True) + + op.execute( + """ALTER TABLE version + ADD COLUMN identifier SERIAL + """ + ) + op.execute(f"UPDATE version SET doi = '10.36478/fairhub.' 
|| identifier::TEXT") + op.create_unique_constraint("unique_identifier", "version", ["identifier"]) + op.create_unique_constraint("unique_doi", "version", ["doi"]) diff --git a/apis/dataset.py b/apis/dataset.py index 0c32921c..5524583c 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -17,10 +17,11 @@ "title": fields.String(required=True), "changelog": fields.String(required=True), "created_at": fields.String(required=True), - "doi": fields.String(required=True), + "doi": fields.String(required=False), "published": fields.Boolean(required=True), "participants": fields.List(fields.String, required=True), "published_on": fields.String(required=True), + "identifier": fields.Integer(required=True), }, ) @@ -192,6 +193,8 @@ def post(self, study_id: int, dataset_id: int): dataset_versions = model.Version.from_data(data_obj, data) model.db.session.add(dataset_versions) model.db.session.commit() + dataset_versions.doi = f"10.36478/fairhub.{dataset_versions.identifier}" + model.db.session.commit() return dataset_versions.to_dict(), 201 diff --git a/model/version.py b/model/version.py index 3aa3bb5c..6cfbfa83 100644 --- a/model/version.py +++ b/model/version.py @@ -2,7 +2,7 @@ import uuid from datetime import timezone -from sqlalchemy import Table +from sqlalchemy import Sequence, Table import model from model.dataset import Dataset @@ -31,10 +31,19 @@ def __init__(self, dataset): published = db.Column(db.BOOLEAN, nullable=False) changelog = db.Column(db.String, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) - doi = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) published_on = db.Column(db.BigInteger, nullable=False) + identifier = db.Column( + db.Integer, + Sequence("version_identifier_seq"), + nullable=False, + unique=True, + server_default=Sequence("version_identifier_seq").next_value(), + ) + + doi = db.Column(db.String, nullable=True, unique=True) + version_readme = db.relationship( "VersionReadme", 
uselist=False, @@ -57,6 +66,7 @@ def to_dict(self): "published_on": self.published_on, "updated_on": self.updated_on, "created_at": self.created_at, + "identifier": self.identifier, "doi": self.doi, "published": self.published, "readme": self.version_readme.content if self.version_readme else "" @@ -76,7 +86,6 @@ def from_data(dataset: Dataset, data: dict): def update(self, data: dict): self.title = data["title"] self.published = data["published"] if "published" in data else False - self.doi = data["doi"] if "doi" in data else "" self.published_on = datetime.datetime.now(timezone.utc).timestamp() self.updated_on = datetime.datetime.now(timezone.utc).timestamp() # self.participants[:] = data["participants"] diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index 874c18dc..5a67e23a 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -22,17 +22,15 @@ def test_post_dataset_version(clients): json={ "title": "Dataset Version 1.0", "published": False, - "doi": "doi:test", "changelog": "changelog testing here", }, ) - assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_version_id = response_data["id"] assert response_data["title"] == "Dataset Version 1.0" assert response_data["published"] is False - assert response_data["doi"] == "doi:test" + assert response_data["doi"] == f"10.36478/fairhub.{response_data['identifier']}" assert response_data["changelog"] == "changelog testing here" @@ -70,20 +68,27 @@ def test_get_all_dataset_versions(clients): assert len(response_data) == 1 assert len(admin_response_data) == 1 assert len(editor_response_data) == 1 - assert response_data[0]["title"] == "Dataset Version 1.0" assert response_data[0]["published"] is False - assert response_data[0]["doi"] == "doi:test" assert response_data[0]["changelog"] == "changelog testing here" + assert ( + response_data[0]["doi"] == 
f"10.36478/fairhub.{response_data[0]['identifier']}" + ) assert admin_response_data[0]["title"] == "Dataset Version 1.0" assert admin_response_data[0]["published"] is False - assert admin_response_data[0]["doi"] == "doi:test" + assert ( + admin_response_data[0]["doi"] + == f"10.36478/fairhub.{response_data[0]['identifier']}" + ) assert admin_response_data[0]["changelog"] == "changelog testing here" assert editor_response_data[0]["title"] == "Dataset Version 1.0" assert editor_response_data[0]["published"] is False - assert editor_response_data[0]["doi"] == "doi:test" + assert ( + editor_response_data[0]["doi"] + == f"10.36478/fairhub.{response_data[0]['identifier']}" + ) assert editor_response_data[0]["changelog"] == "changelog testing here" @@ -122,17 +127,21 @@ def test_get_dataset_version(clients): assert response_data["title"] == "Dataset Version 1.0" assert response_data["published"] is False - assert response_data["doi"] == "doi:test" assert response_data["changelog"] == "changelog testing here" + assert response_data["doi"] == f"10.36478/fairhub.{response_data['identifier']}" assert admin_response_data["title"] == "Dataset Version 1.0" assert admin_response_data["published"] is False - assert admin_response_data["doi"] == "doi:test" + assert ( + admin_response_data["doi"] == f"10.36478/fairhub.{response_data['identifier']}" + ) assert admin_response_data["changelog"] == "changelog testing here" assert editor_response_data["title"] == "Dataset Version 1.0" assert editor_response_data["published"] is False - assert editor_response_data["doi"] == "doi:test" + assert ( + editor_response_data["doi"] == f"10.36478/fairhub.{response_data['identifier']}" + ) assert editor_response_data["changelog"] == "changelog testing here" @@ -154,19 +163,18 @@ def test_put_dataset_version(clients): "title": "Dataset Version 2.0", "changelog": "Updating the changelog", "published": False, - "doi": "doi:test123", "readme": "readme testing here", }, ) assert response.status_code == 
200 response_data = json.loads(response.data) - print(response_data) assert response_data["title"] == "Dataset Version 2.0" assert response_data["changelog"] == "Updating the changelog" - assert response_data["doi"] == "doi:test123" + assert response_data["doi"] == f"10.36478/fairhub.{response_data['identifier']}" assert response_data["readme"] == "" + assert response_data["published"] is False admin_response = _admin_client.put( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}", @@ -174,7 +182,6 @@ def test_put_dataset_version(clients): "title": "Dataset Version 3.0", "changelog": "Changelog modified by admin", "published": False, - "doi": "doi:test", "readme": "readme modified by editor", }, ) @@ -185,7 +192,10 @@ def test_put_dataset_version(clients): assert admin_response_data["title"] == "Dataset Version 3.0" assert admin_response_data["changelog"] == "Changelog modified by admin" assert admin_response_data["published"] is False - assert admin_response_data["doi"] == "doi:test" + assert ( + admin_response_data["doi"] == f"10.36478/fairhub.{response_data['identifier']}" + ) + assert admin_response_data["readme"] == "" editor_response = _editor_client.put( @@ -194,7 +204,6 @@ def test_put_dataset_version(clients): "title": "Dataset Version 4.0", "changelog": "Changelog modified by editor", "published": False, - "doi": "doi:test", "readme": "readme modified by editor", }, ) @@ -207,7 +216,6 @@ def test_put_dataset_version(clients): "title": "Dataset Version 5.0", "changelog": "Changelog modified by viewer", "published": False, - "doi": "test:doi", "readme": "readme modified by viewer", }, ) From 8e603d5971767c66383d458a98c4b80298497c23 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 19 Jan 2024 16:27:41 -0800 Subject: [PATCH 399/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 3 +- apis/dashboard.py | 15 +++--- app.py | 6 
++- modules/etl/config/__init__.py | 2 +- modules/etl/config/aireadi_config.py | 57 ++++++++++++++++------ modules/etl/transforms/module_transform.py | 1 + modules/etl/transforms/redcap_transform.py | 13 ++--- 7 files changed, 62 insertions(+), 35 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 9394961e..7394dabe 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -79,7 +79,6 @@ def validate_is_valid_email(instance): email_address = instance try: validate_email(email_address, check_deliverability=False) - return True except EmailNotValidError as e: raise ValidationError("Invalid email address format") from e @@ -184,7 +183,7 @@ def validate_is_valid_email(instance): email_address = instance try: - validate_email(email_address) + # validate_email(email_address) return True except EmailNotValidError as e: raise ValidationError("Invalid email address format") from e diff --git a/apis/dashboard.py b/apis/dashboard.py index ed54f43a..4522dbff 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -10,7 +10,7 @@ import model from caching import cache from modules.etl import ModuleTransform, RedcapTransform -from modules.etl.config import transformConfigs +from modules.etl.config import redcapTransformConfig, moduleTransformConfigs from .authentication import is_granted @@ -27,7 +27,7 @@ "subgroup": fields.String( required=False, readonly=True, description="Subgroup field" ), - "value": fields.Integer( + "value": fields.Raw( required=False, readonly=True, description="Value field" ), "x": fields.Raw(required=False, readonly=True, description="X-axis field"), @@ -352,28 +352,27 @@ def get(self, study_id: int): # Set report_ids for ETL for report in redcap_project_dashboard["reports"]: - for i, report_config in enumerate(transformConfigs["redcap"]["reports"]): + for i, report_config in enumerate(redcapTransformConfig["reports"]): if ( report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - 
transformConfigs["redcap"]["reports"][i]["kwdargs"] |= { - "report_id": report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"] |= { + "report_id": report["report_id"], } # Structure REDCap ETL Config redcap_etl_config = { "redcap_api_url": redcap_project_view["project_api_url"], "redcap_api_key": redcap_project_view["project_api_key"], - } | transformConfigs["redcap"] + } | redcapTransformConfig redcapTransform = RedcapTransform(redcap_etl_config) mergedTransform = redcapTransform.merged # Execute Dashboard Module Transforms for dashboard_module in redcap_project_dashboard["dashboard_modules"]: - print(dashboard_module) - transform, module_etl_config = transformConfigs[dashboard_module["id"]] + transform, module_etl_config = moduleTransformConfigs[dashboard_module["id"]] transformed = getattr(ModuleTransform(module_etl_config), transform)( mergedTransform ).transformed diff --git a/app.py b/app.py index 9bc8ed71..bc2cd4ef 100644 --- a/app.py +++ b/app.py @@ -70,8 +70,10 @@ def create_app(config_module=None): resources={ "/*": { "origins": [ - "http://localhost:3000", - "https://localhost:3000", + # "http://localhost:3000", + # "https://localhost:3000", + "http://localhost:5173", + "https://localhost:5173", "https:\/\/brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://fairhub.io", ], diff --git a/modules/etl/config/__init__.py b/modules/etl/config/__init__.py index 0ce9700f..b3a0c6ed 100644 --- a/modules/etl/config/__init__.py +++ b/modules/etl/config/__init__.py @@ -8,4 +8,4 @@ phenotypeSexBySiteTransformConfig, currentMedicationsBySiteTransformConfig, ) -from .aireadi_config import transformConfigs +from .aireadi_config import moduleTransformConfigs diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 751f7541..a4d9bb11 100644 --- a/modules/etl/config/aireadi_config.py +++ 
b/modules/etl/config/aireadi_config.py @@ -57,6 +57,7 @@ "treatments", "scrweek", "scryear", + "scrweekyear", ] # Survey Column Groups @@ -133,6 +134,17 @@ "cmtrt_lfst": "Lifestyle Management", } +# +# REDCap Report Merge Map +# + +redcap_report_merge_map: Dict[str, Dict[str, Any]] = { + "participants-list": {"on": index_columns, "how": "inner"}, + "participant-value": {"on": index_columns, "how": "inner"}, + "instrument-status": {"on": index_columns, "how": "inner"}, + "repeat-instrument": {"on": index_columns, "how": "outer"}, +} + # # REDCap Transform Config # @@ -167,6 +179,16 @@ "missing_value": missing_value_generic, } ), + ( + "transform_values_by_column", + { + "column": "scrcmpdat", + "new_column_name": "scrweekyear", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: (int(datetime.strptime(x, "%Y-%m-%d").isocalendar().week), int(datetime.strptime(x, "%Y-%m-%d").isocalendar().year)), + "missing_value": missing_value_generic, + } + ), ( "new_column_from_binary_columns_positive_class", { @@ -210,7 +232,7 @@ ("drop_rows", {"columns": repeat_survey_columns}), ( "aggregate_repeat_instrument_by_index", - {"aggregator": np.max, "dtype": str}, + {"aggregator": "max", "dtype": str}, ), ( "keep_columns", @@ -220,12 +242,11 @@ }, ], "post_transform_merge": ( - "participant-value", + index_columns, [ - # ("participant-value", {"on": index_columns, "how": "inner"}), + ("participant-value", {"on": index_columns, "how": "inner"}), ("instrument-status", {"on": index_columns, "how": "inner"}), ("repeat-instrument", {"on": index_columns, "how": "outer"}), - # ("repeat-instrument", {"on": index_columns, "how": "outer"}), ], ), "post_merge_transforms": [ @@ -2078,11 +2099,11 @@ "transforms": [ { "name": "Current Medications by Site", - "vtype": "SingleCategorical", + "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid"], - "value": "current_medications", + "groups": ["siteid", "current_medications", "scrsex"], + "value": "record_id", 
"func": "count", } ], @@ -2094,20 +2115,26 @@ "astype": str, }, "group": { - "name": "Site", - "field": "siteid", + "name": "Current Medication Count", + "field": "current_medications", "missing_value": missing_value_generic, "astype": str, }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, "color": { - "name": "Site", - "field": "siteid", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, "value": { - "name": "Current Medications (N)", - "field": "current_medications", + "name": "Participants (N)", + "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, @@ -2117,9 +2144,7 @@ }, ) - -transformConfigs: Dict[str, Any] = { - "redcap": redcapTransformConfig, +moduleTransformConfigs: Dict[str, Any] = { "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index e127f896..5d3ce752 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -14,6 +14,7 @@ def __init__( config: Dict[str, Any], logging_config: Dict[str, str] = {}, ) -> None: + # # Logging # diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 88827a79..e16b224a 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -31,7 +31,7 @@ def __init__(self, config: dict) -> None: # Report Merging self.post_transform_merge = ( - config["post_transform_merge"] if "post_transform_merge" in config else [] + config["post_transform_merge"] if "post_transform_merge" in config else ([], []) ) # Post Merge Transforms @@ -158,8 +158,8 @@ def __init__(self, 
config: dict) -> None: # Merge Reports self.logger.info(f"Merging REDCap reports") - receiving_report_key, merge_steps = self.post_transform_merge - self.merged = self._merge_reports(receiving_report_key, merge_steps) + index_columns, merge_steps = self.post_transform_merge + self.merged = self._merge_reports(index_columns, merge_steps) # Apply Post-Merge Transforms self.logger.info(f"Applying REDCap report post-merge transforms") @@ -565,7 +565,7 @@ def _aggregate_repeat_instrument_by_index( aggfunc=aggregator, fill_value=self.missing_value_generic, ) - df = df.merge(pivot, how="inner", on=self.index_columns) + df = df.merge(pivot, how="outer", on=self.index_columns) df = df.drop_duplicates(self.index_columns, keep="first") for column in new_columns: df[column] = df[column].astype(dtype) @@ -684,14 +684,15 @@ def new_column_from_binary_columns_negative_class( def _merge_reports( self, - receiving_report_key: str, + index_columns: List[str], merge_steps: List[Tuple[str, Dict[str, Any]]], ) -> pd.DataFrame: """ Performs N - 1 merge transforms on N reports. 
""" - df_receiving_report = self.reports[receiving_report_key]["transformed"] + receiving_report_key, _ = merge_steps[0] + df_receiving_report = self.reports[receiving_report_key]["transformed"][index_columns] if len(merge_steps) > 0: for providing_report_key, merge_kwdargs in merge_steps: From 300b156b4639b6f40f28981507f7b08a8fcd9268 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 19 Jan 2024 18:12:25 -0800 Subject: [PATCH 400/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 +- apis/dashboard.py | 16 ++++++------- modules/etl/config/aireadi_config.py | 36 +++++++++++++++++++++------- 3 files changed, 36 insertions(+), 18 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 7394dabe..c1be0111 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -183,7 +183,7 @@ def validate_is_valid_email(instance): email_address = instance try: - # validate_email(email_address) + validate_email(email_address) return True except EmailNotValidError as e: raise ValidationError("Invalid email address format") from e diff --git a/apis/dashboard.py b/apis/dashboard.py index 4522dbff..6db7e376 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -10,7 +10,7 @@ import model from caching import cache from modules.etl import ModuleTransform, RedcapTransform -from modules.etl.config import redcapTransformConfig, moduleTransformConfigs +from modules.etl.config import moduleTransformConfigs, redcapTransformConfig from .authentication import is_granted @@ -27,9 +27,7 @@ "subgroup": fields.String( required=False, readonly=True, description="Subgroup field" ), - "value": fields.Raw( - required=False, readonly=True, description="Value field" - ), + "value": fields.Raw(required=False, readonly=True, description="Value field"), "x": fields.Raw(required=False, readonly=True, description="X-axis field"), "y": 
fields.Float(required=False, readonly=True, description="Y-axis field"), "datetime": fields.String( @@ -357,9 +355,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"] |= { - "report_id": report["report_id"], - } + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -372,7 +370,9 @@ def get(self, study_id: int): # Execute Dashboard Module Transforms for dashboard_module in redcap_project_dashboard["dashboard_modules"]: - transform, module_etl_config = moduleTransformConfigs[dashboard_module["id"]] + transform, module_etl_config = moduleTransformConfigs[ + dashboard_module["id"] + ] transformed = getattr(ModuleTransform(module_etl_config), transform)( mergedTransform ).transformed diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 8613377c..ae7eec51 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -151,11 +151,17 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. 
-redcapTransformConfig: Dict[str, List[Any] | Tuple[str, List[Any]] | str | List] = { - "reports": [ +redcapTransformConfig: Dict[str, Any] = { + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] { "key": "participant-value", - "kwdargs": {}, + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "" + }, "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), ("map_missing_values_by_columns", {"columns": data_columns}), @@ -219,7 +225,13 @@ }, { "key": "instrument-status", - "kwdargs": {}, + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "" + }, "transforms": [ ( "remap_values_by_columns", @@ -231,7 +243,13 @@ }, { "key": "repeat-instrument", - "kwdargs": {}, + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "" + }, "transforms": [ ("drop_rows", {"columns": repeat_survey_columns}), ( @@ -245,7 +263,7 @@ ], }, ], - "post_transform_merge": ( + "post_transform_merge": ( # Dict[str, Tuple[List[str], List[Tuple[str, Any]]]] index_columns, [ ("participant-value", {"on": index_columns, "how": "inner"}), @@ -253,15 +271,15 @@ ("repeat-instrument", {"on": index_columns, "how": "outer"}), ], ), - "post_merge_transforms": [ + "post_merge_transforms": [ # Dict[str, Tuple[str, Dict[str, List[Any]]]] ( "remap_values_by_columns", {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, ), ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), ], - "index_columns": ["record_id"], - "missing_value_generic": missing_value_generic, + "index_columns": ["record_id"], # Dict[str, List[str]] + "missing_value_generic": missing_value_generic, # Dict[str, str] } # From acc9f5ffe3a13ed56ea5334761e3797ddd8ced37 Mon 
Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 19 Jan 2024 18:14:18 -0800 Subject: [PATCH 401/505] no message --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index a78d352e..6e1a5b44 100644 --- a/.gitignore +++ b/.gitignore @@ -34,6 +34,9 @@ coverage # Environment variables .env +# Virutal Environments +.venv + # Database postgres_data/* postgres-data/* From a2678da71f593a24cb7f4851e25609919ff1becf Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sat, 20 Jan 2024 02:29:08 +0000 Subject: [PATCH 402/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/config/aireadi_config.py | 25 ++++++++++++---------- modules/etl/transforms/module_transform.py | 1 - modules/etl/transforms/redcap_transform.py | 8 +++++-- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index ae7eec51..56b9e289 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -152,7 +152,7 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. 
redcapTransformConfig: Dict[str, Any] = { - "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] { "key": "participant-value", "kwdargs": { @@ -160,7 +160,7 @@ "raw_or_label_headers": "raw", "export_checkbox_labels": False, "csv_delimiter": "\t", - "report_id": "" + "report_id": "", }, "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), @@ -195,9 +195,12 @@ "column": "scrcmpdat", "new_column_name": "scrweekyear", # ISO 8601 string format token for front-end: %Y - "transform": lambda x: (int(datetime.strptime(x, "%Y-%m-%d").isocalendar().week), int(datetime.strptime(x, "%Y-%m-%d").isocalendar().year)), + "transform": lambda x: ( + int(datetime.strptime(x, "%Y-%m-%d").isocalendar().week), + int(datetime.strptime(x, "%Y-%m-%d").isocalendar().year), + ), "missing_value": missing_value_generic, - } + }, ), ( "new_column_from_binary_columns_positive_class", @@ -230,7 +233,7 @@ "raw_or_label_headers": "raw", "export_checkbox_labels": False, "csv_delimiter": "\t", - "report_id": "" + "report_id": "", }, "transforms": [ ( @@ -248,7 +251,7 @@ "raw_or_label_headers": "raw", "export_checkbox_labels": False, "csv_delimiter": "\t", - "report_id": "" + "report_id": "", }, "transforms": [ ("drop_rows", {"columns": repeat_survey_columns}), @@ -263,7 +266,7 @@ ], }, ], - "post_transform_merge": ( # Dict[str, Tuple[List[str], List[Tuple[str, Any]]]] + "post_transform_merge": ( # Dict[str, Tuple[List[str], List[Tuple[str, Any]]]] index_columns, [ ("participant-value", {"on": index_columns, "how": "inner"}), @@ -271,15 +274,15 @@ ("repeat-instrument", {"on": index_columns, "how": "outer"}), ], ), - "post_merge_transforms": [ # Dict[str, Tuple[str, Dict[str, List[Any]]]] + "post_merge_transforms": [ # Dict[str, Tuple[str, Dict[str, List[Any]]]] ( "remap_values_by_columns", {"columns": repeat_survey_columns, "value_map": 
survey_instrument_map}, ), ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), ], - "index_columns": ["record_id"], # Dict[str, List[str]] - "missing_value_generic": missing_value_generic, # Dict[str, str] + "index_columns": ["record_id"], # Dict[str, List[str]] + "missing_value_generic": missing_value_generic, # Dict[str, str] } # @@ -2147,7 +2150,7 @@ "field": "scrsex", "missing_value": missing_value_generic, "astype": str, - }, + }, "color": { "name": "Sex", "field": "scrsex", diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index 5d3ce752..e127f896 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -14,7 +14,6 @@ def __init__( config: Dict[str, Any], logging_config: Dict[str, str] = {}, ) -> None: - # # Logging # diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index cb446030..a3bee7ec 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -31,7 +31,9 @@ def __init__(self, config: dict) -> None: # Report Merging self.post_transform_merge = ( - config["post_transform_merge"] if "post_transform_merge" in config else ([], []) + config["post_transform_merge"] + if "post_transform_merge" in config + else ([], []) ) # Post Merge Transforms @@ -701,7 +703,9 @@ def _merge_reports( """ receiving_report_key, _ = merge_steps[0] - df_receiving_report = self.reports[receiving_report_key]["transformed"][index_columns] + df_receiving_report = self.reports[receiving_report_key]["transformed"][ + index_columns + ] if len(merge_steps) > 0: for providing_report_key, merge_kwdargs in merge_steps: From 7c8f9a10a97a78bd0acf03df0cdde9ed9977efd4 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 19 Jan 2024 18:12:25 -0800 Subject: [PATCH 403/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 2 +- apis/dashboard.py | 16 ++++++------- modules/etl/config/aireadi_config.py | 36 +++++++++++++++++++++------- 3 files changed, 36 insertions(+), 18 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 7394dabe..c1be0111 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -183,7 +183,7 @@ def validate_is_valid_email(instance): email_address = instance try: - # validate_email(email_address) + validate_email(email_address) return True except EmailNotValidError as e: raise ValidationError("Invalid email address format") from e diff --git a/apis/dashboard.py b/apis/dashboard.py index 4522dbff..6db7e376 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -10,7 +10,7 @@ import model from caching import cache from modules.etl import ModuleTransform, RedcapTransform -from modules.etl.config import redcapTransformConfig, moduleTransformConfigs +from modules.etl.config import moduleTransformConfigs, redcapTransformConfig from .authentication import is_granted @@ -27,9 +27,7 @@ "subgroup": fields.String( required=False, readonly=True, description="Subgroup field" ), - "value": fields.Raw( - required=False, readonly=True, description="Value field" - ), + "value": fields.Raw(required=False, readonly=True, description="Value field"), "x": fields.Raw(required=False, readonly=True, description="X-axis field"), "y": fields.Float(required=False, readonly=True, description="Y-axis field"), "datetime": fields.String( @@ -357,9 +355,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"] |= { - "report_id": report["report_id"], - } + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -372,7 +370,9 @@ def get(self, study_id: int): # Execute Dashboard Module 
Transforms for dashboard_module in redcap_project_dashboard["dashboard_modules"]: - transform, module_etl_config = moduleTransformConfigs[dashboard_module["id"]] + transform, module_etl_config = moduleTransformConfigs[ + dashboard_module["id"] + ] transformed = getattr(ModuleTransform(module_etl_config), transform)( mergedTransform ).transformed diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 8613377c..ae7eec51 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -151,11 +151,17 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. -redcapTransformConfig: Dict[str, List[Any] | Tuple[str, List[Any]] | str | List] = { - "reports": [ +redcapTransformConfig: Dict[str, Any] = { + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] { "key": "participant-value", - "kwdargs": {}, + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "" + }, "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), ("map_missing_values_by_columns", {"columns": data_columns}), @@ -219,7 +225,13 @@ }, { "key": "instrument-status", - "kwdargs": {}, + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "" + }, "transforms": [ ( "remap_values_by_columns", @@ -231,7 +243,13 @@ }, { "key": "repeat-instrument", - "kwdargs": {}, + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "" + }, "transforms": [ ("drop_rows", {"columns": repeat_survey_columns}), ( @@ -245,7 +263,7 @@ ], }, ], - "post_transform_merge": ( + "post_transform_merge": ( # Dict[str, Tuple[List[str], List[Tuple[str, Any]]]] index_columns, [ 
("participant-value", {"on": index_columns, "how": "inner"}), @@ -253,15 +271,15 @@ ("repeat-instrument", {"on": index_columns, "how": "outer"}), ], ), - "post_merge_transforms": [ + "post_merge_transforms": [ # Dict[str, Tuple[str, Dict[str, List[Any]]]] ( "remap_values_by_columns", {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, ), ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), ], - "index_columns": ["record_id"], - "missing_value_generic": missing_value_generic, + "index_columns": ["record_id"], # Dict[str, List[str]] + "missing_value_generic": missing_value_generic, # Dict[str, str] } # From b7726af269a73365074808643ebd66fd472f323a Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sat, 20 Jan 2024 02:33:51 +0000 Subject: [PATCH 404/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/config/aireadi_config.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index f1074ac8..04bb7687 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -152,7 +152,7 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. 
redcapTransformConfig: Dict[str, Any] = { - "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] { "key": "participant-value", "kwdargs": { @@ -160,7 +160,7 @@ "raw_or_label_headers": "raw", "export_checkbox_labels": False, "csv_delimiter": "\t", - "report_id": "" + "report_id": "", }, "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), From 402574418a81a581aa8f168b10be532fe7a12b67 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Thu, 25 Jan 2024 16:13:57 -0800 Subject: [PATCH 405/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env.example | 11 ++++++++--- README.md | 14 ++++++++++++++ apis/__init__.py | 7 +++++++ apis/dashboard.py | 11 ++++------- app.py | 20 +++++++++----------- 5 files changed, 42 insertions(+), 21 deletions(-) diff --git a/.env.example b/.env.example index a5fa42a7..dcecd1b8 100644 --- a/.env.example +++ b/.env.example @@ -1,7 +1,12 @@ FAIRHUB_DATABASE_URL="postgresql://admin:root@localhost:5432/fairhub_local" FAIRHUB_SECRET="AddAny32+CharacterCountWordHereAsYourSecret" - FAIRHUB_AZURE_READ_SAS_TOKEN= FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME= - -FAIRHUB_GROWTHBOOK_CLIENT_KEY= \ No newline at end of file +FAIRHUB_GROWTHBOOK_CLIENT_KEY= +FAIRHUB_CACHE_DEFAULT_TIMEOUT=86400 +FAIRHUB_CACHE_KEY_PREFIX=fairhub-io# +FAIRHUB_CACHE_HOST=localhost +FAIRHUB_CACHE_PORT=6379 +FAIRHUB_CACHE_TYPE=RedisCache +FAIRHUB_CACHE_DB=0 +FAIRHUB_CACHE_URL=redis://127.0.0.1:6379 diff --git a/README.md b/README.md index cf8ed8ed..dd7ec5a7 100644 --- a/README.md +++ b/README.md @@ -98,6 +98,20 @@ Close the database with: docker-compose -f ./db-docker-compose.yaml down -v ``` +## Running + +For developer mode: + +```bash +flask run --debug +``` + +For production mode: + +```bash +python3 app.py --host $HOST --port $PORT +``` + ## 
License This work is licensed under diff --git a/apis/__init__.py b/apis/__init__.py index 3c48bc8a..fbee9508 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -6,6 +6,7 @@ from .authentication import api as authentication from .contributor import api as contributors_api +from .dashboard import api as dashboard from .dataset import api as dataset_api from .dataset_metadata.dataset_access import api as access from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier @@ -23,6 +24,7 @@ from .dataset_metadata.dataset_title import api as title from .file import api as file_api from .participant import api as participants_api +from .redcap import api as redcap from .study import api as study_api from .study_metadata.study_arm import api as arm from .study_metadata.study_available_ipd import api as available_ipd @@ -89,6 +91,8 @@ "identification", "study_description", "dataset_contributor", + "redcap", + "dashboard", "utils", ] @@ -117,3 +121,6 @@ def get(self): api.add_namespace(contributors_api) api.add_namespace(user) api.add_namespace(utils) +api.add_namespace(redcap) +api.add_namespace(dashboard) + diff --git a/apis/dashboard.py b/apis/dashboard.py index 6db7e376..822abd9e 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -183,7 +183,7 @@ class RedcapProjectDashboards(Resource): @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model, as_list=True) def get(self, study_id: int): - """Get all REDCap project dashboard""" + """Get all REDCap project dashboards""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not modify", 403 @@ -199,6 +199,7 @@ def get(self, study_id: int): @api.route("/study//dashboard/add") class AddRedcapProjectDashboard(Resource): + @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @@ -299,14 +300,13 @@ def post(self, study_id: int): 
@api.route("/study//dashboard-connector") class RedcapProjectDashboardConnector(Resource): - """Get REDCap project dashboard connector""" @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_connector_model) def get(self, study_id: int): - """Get Study Redcap Project Dashboard""" + """Get REDCap project dashboard connector""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not get this dashboard", 403 @@ -323,7 +323,6 @@ def get(self, study_id: int): @api.route("/study//dashboard") class RedcapProjectDashboard(Resource): - """Get REDCap project dashboard""" @api.doc(parser=dashboard_parser) @api.response(200, "Success") @@ -331,7 +330,7 @@ class RedcapProjectDashboard(Resource): @api.marshal_with(redcap_project_dashboard_model) @cache.cached(query_string=True) def get(self, study_id: int): - """Get Study Redcap Project Dashboard""" + """Get REDCap project dashboard""" study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not get this dashboard", 403 @@ -386,8 +385,6 @@ def get(self, study_id: int): @api.route("/study//dashboard/edit") class EditRedcapProjectDashboard(Resource): - """Edit REDCap project dashboard""" - @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") diff --git a/app.py b/app.py index 578272e4..a30f0a24 100644 --- a/app.py +++ b/app.py @@ -12,6 +12,7 @@ from growthbook import GrowthBook from sqlalchemy import MetaData from waitress import serve +from caching import cache import config import model @@ -63,6 +64,7 @@ def create_app(config_module=None): model.db.init_app(app) api.init_app(app) bcrypt.init_app(app) + cache.init_app(app) cors_origins = [ "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # 
pylint: disable=anomalous-backslash-in-string @@ -71,10 +73,10 @@ def create_app(config_module=None): "https://staging.fairhub.io", "https://fairhub.io", ] - + print(app.debug) if app.debug: - cors_origins.extend(["http://localhost:3000", "http://localhost:5000"]) - + cors_origins.extend(["http://localhost:3000"]) + print(cors_origins) # Only allow CORS origin for localhost:3000 # and any subdomain of azurestaticapps.net/ CORS( @@ -95,13 +97,11 @@ def create_app(config_module=None): # app.config[ # "CORS_ALLOW_HEADERS" - # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, - # Access-Control-Allow-Credentials" - # app.config["CORS_SUPPORTS_CREDENTIALS"] = True + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" # app.config[ # "CORS_EXPOSE_HEADERS" - # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, - # Access-Control-Allow-Credentials" + # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" + # app.config["CORS_SUPPORTS_CREDENTIALS"] = True # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) @@ -215,7 +215,6 @@ def on_after_request(resp): # "Access-Control-Expose-Headers" # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" - app.logger.info(resp.headers) return resp @@ -255,12 +254,11 @@ def destroy_schema(): parser = ArgumentParser() parser.add_argument( - "-p", "--port", default=5000, type=int, help="port to listen on" + "-p", "--port", default=5000, type=int, help="Port to listen on" ) args = parser.parse_args() port = args.port flask_app = create_app() - # flask_app.run(host="0.0.0.0", port=port) serve(flask_app, port=port) From 3f56c58f025699a55c61ab65b94b614ada2f0a9e Mon Sep 17 00:00:00 2001 From: Lint Action Date: Fri, 26 Jan 2024 00:14:22 +0000 Subject: [PATCH 406/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/__init__.py | 1 - apis/dashboard.py | 3 --- 2 files changed, 4 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index fbee9508..e2fc9817 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -123,4 +123,3 @@ def get(self): api.add_namespace(utils) api.add_namespace(redcap) api.add_namespace(dashboard) - diff --git a/apis/dashboard.py b/apis/dashboard.py index 822abd9e..73608c6d 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -199,7 +199,6 @@ def get(self, study_id: int): @api.route("/study//dashboard/add") class AddRedcapProjectDashboard(Resource): - @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @@ -300,7 +299,6 @@ def post(self, study_id: int): @api.route("/study//dashboard-connector") class RedcapProjectDashboardConnector(Resource): - @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") @@ -323,7 +321,6 @@ def get(self, study_id: int): @api.route("/study//dashboard") class RedcapProjectDashboard(Resource): - @api.doc(parser=dashboard_parser) @api.response(200, "Success") @api.response(400, "Validation Error") From 95fb942537d247d56249448a69a9affd458e86de Mon Sep 17 00:00:00 2001 From: Greenstick Date: Sun, 4 Feb 2024 15:09:04 -0800 Subject: [PATCH 407/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 11 +- app.py | 3 +- modules/etl/config/aireadi_config.py | 456 +++++++++++++-------------- 3 files changed, 237 insertions(+), 233 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 73608c6d..26db0027 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -23,7 +23,7 @@ required=True, readonly=True, description="Filterby field" ), "group": fields.String(required=True, readonly=True, description="Group 
field"), - "color": fields.String(required=True, readonly=True, description="Color field"), + # "color": fields.String(required=True, readonly=True, description="Color field"), "subgroup": fields.String( required=False, readonly=True, description="Subgroup field" ), @@ -345,6 +345,7 @@ def get(self, study_id: int): redcap_project_view = redcap_project_view.to_dict() # Set report_ids for ETL + print("reports-pre", redcap_project_dashboard["reports"]) for report in redcap_project_dashboard["reports"]: for i, report_config in enumerate(redcapTransformConfig["reports"]): if ( @@ -354,7 +355,7 @@ def get(self, study_id: int): redcapTransformConfig["reports"][i]["kwdargs"][ "report_id" ] = report["report_id"] - + print("reports-post", redcap_project_dashboard["reports"]) # Structure REDCap ETL Config redcap_etl_config = { "redcap_api_url": redcap_project_view["project_api_url"], @@ -369,6 +370,7 @@ def get(self, study_id: int): transform, module_etl_config = moduleTransformConfigs[ dashboard_module["id"] ] + print(transform) transformed = getattr(ModuleTransform(module_etl_config), transform)( mergedTransform ).transformed @@ -479,11 +481,14 @@ def put(self, study_id: int): ) # Clear Redis Cache # TODO: We want to clear the cache by dashboard_id/cache key, not the whole cache! 
- cache.clear() + cache.clear(); update_redcap_project_dashboard_query = ( model.StudyRedcapProjectDashboard.query.get(data["dashboard_id"]) ) + print("data-update", data) + print("pre-update", update_redcap_project_dashboard_query.to_dict()) update_redcap_project_dashboard_query.update(data) + print("post-update", update_redcap_project_dashboard_query.to_dict()) model.db.session.commit() update_redcap_project_dashboard: Dict[ str, Any diff --git a/app.py b/app.py index a30f0a24..7ac1f4c0 100644 --- a/app.py +++ b/app.py @@ -73,10 +73,9 @@ def create_app(config_module=None): "https://staging.fairhub.io", "https://fairhub.io", ] - print(app.debug) if app.debug: cors_origins.extend(["http://localhost:3000"]) - print(cors_origins) + # Only allow CORS origin for localhost:3000 # and any subdomain of azurestaticapps.net/ CORS( diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 04bb7687..fc1c0619 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -326,12 +326,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Recruitment Survey", + # "field": "recruitment_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -370,12 +370,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "FAQ Survey", + # "field": "faq_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -414,12 +414,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - 
"name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Screening Survey", + # "field": "screening_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -458,12 +458,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Preconsent Survey", + # "field": "preconsent_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -502,12 +502,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Consent Survey", + # "field": "consent_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -549,12 +549,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Staff Consent Attestation Survey", + # "field": "staff_consent_attestation_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -593,12 +593,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Demographics 
Survey", + # "field": "demographics_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -637,12 +637,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Health Survey", + # "field": "health_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -681,12 +681,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Substance Use Survey", + # "field": "substance_use_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -725,12 +725,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "CES-D-10 Survey", + # "field": "cesd10_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -769,12 +769,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "PAID-5 DM Survey", + # "field": "paid5_dm_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -813,12 +813,12 @@ "missing_value": missing_value_generic, 
"astype": str, }, - "color": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Diabetes Survey", + # "field": "diabetes_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -857,12 +857,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Dietary Survey", + # "field": "dietary_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -901,12 +901,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Opthalmic Survey", + # "field": "ophthalmic_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -945,12 +945,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "PhenX SDOH Combined Survey", + # "field": "px_sdoh_combined_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -989,12 +989,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": 
"PhenX Food Insecurity Survey", + # "field": "px_food_insecurity_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1036,12 +1036,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "PhenX Neighborhood Environment Survey", + # "field": "px_neighborhood_environment_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1083,12 +1083,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "PhenX Racial and Ethnic Discrimination Survey", + # "field": "px_racial_ethnic_discrimination_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1127,12 +1127,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Decline Participation Survey", - "field": "decline_participation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Decline Participation Survey", + # "field": "decline_participation_survey_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1171,12 +1171,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + 
# "color": { + # "name": "Study Enrollment Survey", + # "field": "study_enrollment_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1215,12 +1215,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Driving Record", + # "field": "driving_record_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1259,12 +1259,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Device Distribution", + # "field": "device_distribution_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1303,12 +1303,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Medications Assessment", + # "field": "meds_assessment_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1347,12 +1347,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Physical Assessment", - "field": "physical_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Physical Assessment", + # "field": "physical_assessment_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": 
"record_id", @@ -1391,12 +1391,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "BCVA", + # "field": "bcva_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1435,12 +1435,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Photopic MARS", + # "field": "photopic_mars_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1479,12 +1479,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Mesopic MARS", + # "field": "mesopic_mars_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1523,12 +1523,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Monofilament", + # "field": "monofilament_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1567,12 +1567,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "MOCA", - "field": "moca_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "MOCA", + # "field": "moca_complete", + # "missing_value": 
missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1611,12 +1611,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "ECG Survey", + # "field": "ecg_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1655,12 +1655,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Lab Results Survey", + # "field": "lab_results_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1699,12 +1699,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Specimen Management", + # "field": "specimen_management_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1743,12 +1743,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Device Return", + # "field": "device_return_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1787,12 +1787,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Disposition Survey", - "field": "disposition_complete", - 
"missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Disposition Survey", + # "field": "disposition_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1831,12 +1831,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Data Management Survey", + # "field": "data_management_complete", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1879,12 +1879,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Race", + # "field": "race", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "x": { "name": "Week of the Year", "field": "scrweek", @@ -1933,12 +1933,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Phenotype", + # "field": "phenotypes", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "x": { "name": "Week of the Year", "field": "scrweek", @@ -1992,12 +1992,12 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, + # "color": { + # "name": "Race", + # "field": "race", + # "missing_value": missing_value_generic, + # "astype": str, + # }, "value": { "name": "Count (N)", "field": "record_id", From f6f9fe4478b6c43854b91c757935e65208464fc7 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sun, 4 Feb 2024 23:09:30 
+0000 Subject: [PATCH 408/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../0defbfc71c59_add_identifier_to_version.py | 1 + .../29e42ce4be3f_adding_license_text.py | 1 + ...2ac2b020c7c_delete_dataset_readme_table.py | 1 + apis/__init__.py | 1 + apis/authentication.py | 1 + apis/dashboard.py | 21 +++--- .../dataset_alternate_identifier.py | 1 + apis/dataset_metadata/dataset_consent.py | 1 + apis/dataset_metadata/dataset_contributor.py | 1 + apis/dataset_metadata/dataset_date.py | 1 + apis/dataset_metadata/dataset_funder.py | 1 + apis/dataset_metadata/dataset_other.py | 1 + apis/dataset_metadata/dataset_record_keys.py | 1 + apis/dataset_metadata/dataset_related_item.py | 1 + apis/dataset_metadata/dataset_title.py | 1 + apis/file.py | 1 + apis/redcap.py | 1 + apis/study_metadata/study_arm.py | 1 + apis/study_metadata/study_available_ipd.py | 1 + apis/study_metadata/study_contact.py | 1 + apis/study_metadata/study_description.py | 1 + apis/study_metadata/study_design.py | 1 + apis/study_metadata/study_eligibility.py | 1 + apis/study_metadata/study_identification.py | 1 + apis/study_metadata/study_intervention.py | 1 + apis/study_metadata/study_ipdsharing.py | 1 + apis/study_metadata/study_link.py | 1 + apis/study_metadata/study_location.py | 1 + apis/study_metadata/study_other.py | 1 + apis/study_metadata/study_overall_official.py | 1 + apis/study_metadata/study_reference.py | 1 + .../study_sponsors_collaborators.py | 1 + apis/study_metadata/study_status.py | 1 + apis/utils.py | 1 + apis/utils_namespace.py | 1 + app.py | 1 + config.py | 1 + core/__init__.py | 1 - core/utils.py | 1 + .../dataset_metadata/dataset_related_item.py | 72 ++++++++++++------- model/study.py | 6 +- model/study_contributor.py | 6 +- model/study_metadata/identifiers.py | 22 +++--- model/study_metadata/study_eligibility.py | 6 +- model/version.py | 2 +- 
modules/etl/transforms/module_transform.py | 6 +- pytest_config.py | 1 + tests/conftest.py | 1 + tests/functional/test_server_launch.py | 1 + tests/functional/test_study_api.py | 1 + tests/functional/test_study_dataset_api.py | 1 + 51 files changed, 123 insertions(+), 61 deletions(-) diff --git a/alembic/versions/0defbfc71c59_add_identifier_to_version.py b/alembic/versions/0defbfc71c59_add_identifier_to_version.py index 8adc24e0..128cca30 100644 --- a/alembic/versions/0defbfc71c59_add_identifier_to_version.py +++ b/alembic/versions/0defbfc71c59_add_identifier_to_version.py @@ -5,6 +5,7 @@ Create Date: 2024-01-05 13:25:15.547450 """ + from typing import Sequence, Union from alembic import op diff --git a/alembic/versions/29e42ce4be3f_adding_license_text.py b/alembic/versions/29e42ce4be3f_adding_license_text.py index f00ead96..190db9d2 100644 --- a/alembic/versions/29e42ce4be3f_adding_license_text.py +++ b/alembic/versions/29e42ce4be3f_adding_license_text.py @@ -5,6 +5,7 @@ Create Date: 2023-12-21 13:34:26.478808 """ + from typing import Sequence, Union from alembic import op diff --git a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py index f2ea30f3..5fac771b 100644 --- a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py +++ b/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py @@ -5,6 +5,7 @@ Create Date: 2023-11-08 15:47:00.205940 """ + from typing import Sequence, Union import alembic diff --git a/apis/__init__.py b/apis/__init__.py index e2fc9817..da3a02d0 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -1,4 +1,5 @@ """Initialize the api system for the backend""" + from flask_restx import Api, Resource from apis.dataset_metadata_namespace import api as dataset_metadata_namespace diff --git a/apis/authentication.py b/apis/authentication.py index fc7432f3..941742a6 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -1,6 +1,7 @@ """This module is 
used to authenticate users to the system and handle few authentication errors. Also, it sets token for logged user along with expiration date""" + import datetime import importlib import os diff --git a/apis/dashboard.py b/apis/dashboard.py index 26db0027..1821943f 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -1,4 +1,5 @@ """API routes for study redcap""" + from typing import Any, Dict, List, Union from flask import request @@ -291,9 +292,9 @@ def post(self, study_id: int): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[ - str, Any - ] = connect_redcap_project_dashboard_data.to_dict() + connect_redcap_project_dashboard: Dict[str, Any] = ( + connect_redcap_project_dashboard_data.to_dict() + ) return connect_redcap_project_dashboard, 201 @@ -352,9 +353,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( + report["report_id"] + ) print("reports-post", redcap_project_dashboard["reports"]) # Structure REDCap ETL Config redcap_etl_config = { @@ -481,7 +482,7 @@ def put(self, study_id: int): ) # Clear Redis Cache # TODO: We want to clear the cache by dashboard_id/cache key, not the whole cache! 
- cache.clear(); + cache.clear() update_redcap_project_dashboard_query = ( model.StudyRedcapProjectDashboard.query.get(data["dashboard_id"]) ) @@ -490,9 +491,9 @@ def put(self, study_id: int): update_redcap_project_dashboard_query.update(data) print("post-update", update_redcap_project_dashboard_query.to_dict()) model.db.session.commit() - update_redcap_project_dashboard: Dict[ - str, Any - ] = update_redcap_project_dashboard_query.to_dict() + update_redcap_project_dashboard: Dict[str, Any] = ( + update_redcap_project_dashboard_query.to_dict() + ) return update_redcap_project_dashboard, 201 diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 14a1c896..5d6b5e07 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -1,4 +1,5 @@ """API endpoints for dataset alternate identifier""" + from typing import Any, Union from flask import Response, request diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py index a033273f..958822b2 100644 --- a/apis/dataset_metadata/dataset_consent.py +++ b/apis/dataset_metadata/dataset_consent.py @@ -1,4 +1,5 @@ """API for dataset consent metadata""" + from flask import request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py index 67458e06..69193f66 100644 --- a/apis/dataset_metadata/dataset_contributor.py +++ b/apis/dataset_metadata/dataset_contributor.py @@ -1,4 +1,5 @@ """API for dataset contributor metadata""" + from typing import Any, Union from flask import Response, request diff --git a/apis/dataset_metadata/dataset_date.py b/apis/dataset_metadata/dataset_date.py index 96eb4652..947c356d 100644 --- a/apis/dataset_metadata/dataset_date.py +++ b/apis/dataset_metadata/dataset_date.py @@ -1,4 +1,5 @@ 
"""APIs for dataset date metadata""" + from typing import Any, Union from flask import Response, request diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py index b6a3dece..7110e5b9 100644 --- a/apis/dataset_metadata/dataset_funder.py +++ b/apis/dataset_metadata/dataset_funder.py @@ -1,4 +1,5 @@ """API endpoints for dataset funder""" + from typing import Any, Union from flask import Response, request diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 6df0f9aa..e00f9b86 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,4 +1,5 @@ """API endpoints for other dataset metadata""" + from flask import request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_record_keys.py index 8353463a..c4146022 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_record_keys.py @@ -1,4 +1,5 @@ """API endpoints for dataset record keys""" + from flask import request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py index cc526bf6..0a409b6a 100644 --- a/apis/dataset_metadata/dataset_related_item.py +++ b/apis/dataset_metadata/dataset_related_item.py @@ -1,4 +1,5 @@ """API for dataset related item""" + from typing import Any, Union from flask import Response, request diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py index b22d02fa..b9da2382 100644 --- a/apis/dataset_metadata/dataset_title.py +++ b/apis/dataset_metadata/dataset_title.py @@ -1,4 +1,5 @@ """API for dataset title metadata""" + from typing import Any, Union from flask import Response, request diff --git a/apis/file.py b/apis/file.py 
index 4e8fc4e4..c2ad8d98 100644 --- a/apis/file.py +++ b/apis/file.py @@ -1,4 +1,5 @@ """APIs for study files""" + import importlib import os import uuid diff --git a/apis/redcap.py b/apis/redcap.py index b14d36cf..a3c04fa4 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -1,4 +1,5 @@ """API routes for study redcap""" + from typing import Any, Union from flask import request diff --git a/apis/study_metadata/study_arm.py b/apis/study_metadata/study_arm.py index fb37b2e8..1edf6e23 100644 --- a/apis/study_metadata/study_arm.py +++ b/apis/study_metadata/study_arm.py @@ -1,4 +1,5 @@ """API routes for study arm metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py index 711e5280..7e12f5ba 100644 --- a/apis/study_metadata/study_available_ipd.py +++ b/apis/study_metadata/study_available_ipd.py @@ -1,4 +1,5 @@ """API routes for study available ipd metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py index 4293d95c..8c02108c 100644 --- a/apis/study_metadata/study_contact.py +++ b/apis/study_metadata/study_contact.py @@ -1,4 +1,5 @@ """API routes for study contact metadata""" + import typing from email_validator import EmailNotValidError, validate_email diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 026626e6..0c181dee 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -1,4 +1,5 @@ """API routes for study description metadata""" + from flask import request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 37cd77df..3d3cf0be 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -1,4 +1,5 @@ 
"""API routes for study design metadata""" + import typing from flask import request diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 7bd1771c..ee4d6af0 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -1,4 +1,5 @@ """API routes for study eligibility metadata""" + from flask import request from flask_restx import Resource, fields from jsonschema import ValidationError, validate diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py index 503e699f..01a3a0fa 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,4 +1,5 @@ """API routes for study identification metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index 3accb513..e53fa828 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -1,4 +1,5 @@ """API routes for study intervention metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py index ac49e5f5..27b7e166 100644 --- a/apis/study_metadata/study_ipdsharing.py +++ b/apis/study_metadata/study_ipdsharing.py @@ -1,4 +1,5 @@ """API routes for study ipdsharing metadata""" + import typing from flask import request diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py index cd4d802a..78cbf417 100644 --- a/apis/study_metadata/study_link.py +++ b/apis/study_metadata/study_link.py @@ -1,4 +1,5 @@ """API routes for study link metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index 232f03b4..b017e235 100644 --- a/apis/study_metadata/study_location.py +++ 
b/apis/study_metadata/study_location.py @@ -1,4 +1,5 @@ """API routes for study location metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index a96fb386..46696303 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -1,4 +1,5 @@ """API routes for study other metadata""" + import typing from flask import request diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 52345980..79eb4bb5 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -1,4 +1,5 @@ """API routes for study overall official metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py index 40a27f2b..19e1b15d 100644 --- a/apis/study_metadata/study_reference.py +++ b/apis/study_metadata/study_reference.py @@ -1,4 +1,5 @@ """API routes for study reference metadata""" + import typing from flask import Response, request diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py index 89ab8f2b..00d9885b 100644 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ b/apis/study_metadata/study_sponsors_collaborators.py @@ -1,4 +1,5 @@ """API routes for study sponsors and collaborators metadata""" + import typing from flask import request diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index cd8a9b96..d0edbfce 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -1,4 +1,5 @@ """API routes for study status metadata""" + import typing from flask import request diff --git a/apis/utils.py b/apis/utils.py index c5a164c3..4bf07817 100644 --- a/apis/utils.py +++ b/apis/utils.py @@ -1,4 +1,5 @@ """Utils Endpoints""" + from 
flask import request from flask_restx import Namespace, Resource diff --git a/apis/utils_namespace.py b/apis/utils_namespace.py index beb9722c..8ef2bf09 100644 --- a/apis/utils_namespace.py +++ b/apis/utils_namespace.py @@ -1,4 +1,5 @@ """Namespace for utils operations""" + from flask_restx import Namespace api = Namespace("Utils", description="utils operations", path="/") diff --git a/app.py b/app.py index 7ac1f4c0..d360dcc0 100644 --- a/app.py +++ b/app.py @@ -1,4 +1,5 @@ """Entry point for the application.""" + import datetime import importlib import logging diff --git a/config.py b/config.py index 6517a3a2..7666f32d 100644 --- a/config.py +++ b/config.py @@ -1,4 +1,5 @@ """Configuration for the application.""" + from os import environ from pathlib import Path from dotenv import dotenv_values diff --git a/core/__init__.py b/core/__init__.py index efb05b8e..b8fd69a2 100644 --- a/core/__init__.py +++ b/core/__init__.py @@ -1,4 +1,3 @@ """Core package for the application.""" - from . import utils # noqa: F401 diff --git a/core/utils.py b/core/utils.py index c847973a..55f864ee 100644 --- a/core/utils.py +++ b/core/utils.py @@ -1,4 +1,5 @@ """Utils for core""" + import requests diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py index f95db200..13b7b344 100644 --- a/model/dataset_metadata/dataset_related_item.py +++ b/model/dataset_metadata/dataset_related_item.py @@ -63,33 +63,51 @@ def to_dict(self): ], "creators": [c.to_dict() for c in creators], "contributors": [c.to_dict() for c in contributors], - "publication_year": self.dataset_related_item_other.publication_year - if self.dataset_related_item_other - else None, - "volume": self.dataset_related_item_other.volume - if self.dataset_related_item_other - else None, - "issue": self.dataset_related_item_other.issue - if self.dataset_related_item_other - else None, - "number_value": self.dataset_related_item_other.number_value - if 
self.dataset_related_item_other - else None, - "number_type": self.dataset_related_item_other.number_type - if self.dataset_related_item_other - else None, - "first_page": self.dataset_related_item_other.first_page - if self.dataset_related_item_other - else None, - "last_page": self.dataset_related_item_other.last_page - if self.dataset_related_item_other - else None, - "publisher": self.dataset_related_item_other.publisher - if self.dataset_related_item_other - else None, - "edition": self.dataset_related_item_other.edition - if self.dataset_related_item_other - else None, + "publication_year": ( + self.dataset_related_item_other.publication_year + if self.dataset_related_item_other + else None + ), + "volume": ( + self.dataset_related_item_other.volume + if self.dataset_related_item_other + else None + ), + "issue": ( + self.dataset_related_item_other.issue + if self.dataset_related_item_other + else None + ), + "number_value": ( + self.dataset_related_item_other.number_value + if self.dataset_related_item_other + else None + ), + "number_type": ( + self.dataset_related_item_other.number_type + if self.dataset_related_item_other + else None + ), + "first_page": ( + self.dataset_related_item_other.first_page + if self.dataset_related_item_other + else None + ), + "last_page": ( + self.dataset_related_item_other.last_page + if self.dataset_related_item_other + else None + ), + "publisher": ( + self.dataset_related_item_other.publisher + if self.dataset_related_item_other + else None + ), + "edition": ( + self.dataset_related_item_other.edition + if self.dataset_related_item_other + else None + ), "identifiers": [ i.to_dict() for i in self.dataset_related_item_identifier # type: ignore diff --git a/model/study.py b/model/study.py index 48623a77..dae5cbe3 100644 --- a/model/study.py +++ b/model/study.py @@ -171,9 +171,9 @@ def to_dict(self): "created_at": self.created_at, "updated_on": self.updated_on, "size": self.study_other.size if self.study_other else None, - 
"description": self.study_description.brief_summary - if self.study_description - else None, + "description": ( + self.study_description.brief_summary if self.study_description else None + ), "owner": owner.to_dict()["id"] if owner else None, "role": contributor_permission.to_dict()["role"], } diff --git a/model/study_contributor.py b/model/study_contributor.py index d277f8b4..7ba8eb21 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -30,9 +30,9 @@ def __init__(self, study: Study, user: User, permission): def to_dict(self): return { "id": self.user_id, - "name": self.user.user_details.first_name - if self.user.user_details - else None, + "name": ( + self.user.user_details.first_name if self.user.user_details else None + ), "email_address": self.user.email_address, "orcid": self.user.user_details.orcid if self.user.user_details else None, "role": self.permission, diff --git a/model/study_metadata/identifiers.py b/model/study_metadata/identifiers.py index cf9e14fa..9f7842ed 100644 --- a/model/study_metadata/identifiers.py +++ b/model/study_metadata/identifiers.py @@ -13,20 +13,22 @@ def to_dict(self): key=lambda i: i.created_at, ) return { - "primary": [ - identifier - for identifier in sorted_study_identifications - if not identifier.secondary - ][0].to_dict() - if len( + "primary": ( [ identifier for identifier in sorted_study_identifications if not identifier.secondary - ] - ) - != 0 # noqa: W503 - else None, + ][0].to_dict() + if len( + [ + identifier + for identifier in sorted_study_identifications + if not identifier.secondary + ] + ) + != 0 # noqa: W503 + else None + ), "secondary": [ identifier.to_dict() for identifier in sorted_study_identifications diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 781c8cfc..1a636819 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -62,9 +62,9 @@ def to_dict(self): "exclusion_criteria": 
self.exclusion_criteria, "study_population": self.study_population, "sampling_method": self.sampling_method, - "study_type": self.study.study_design.study_type - if self.study.study_design - else None, + "study_type": ( + self.study.study_design.study_type if self.study.study_design else None + ), } def to_dict_metadata(self): diff --git a/model/version.py b/model/version.py index 6cfbfa83..cccb707c 100644 --- a/model/version.py +++ b/model/version.py @@ -69,7 +69,7 @@ def to_dict(self): "identifier": self.identifier, "doi": self.doi, "published": self.published, - "readme": self.version_readme.content if self.version_readme else "" + "readme": self.version_readme.content if self.version_readme else "", # "participants": [p.id for p in self.participants] # if isinstance(self.participants, (list, set)) # else [], diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index e127f896..3c0d29a6 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -173,9 +173,9 @@ def simpleTransform(self, df: pd.DataFrame) -> object: One transform for one VType. 
""" self.transformed = [] - transform: Dict[ - str, Any - ] = self.transforms.pop() # simple transforms have only one transform object + transform: Dict[str, Any] = ( + self.transforms.pop() + ) # simple transforms have only one transform object name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), diff --git a/pytest_config.py b/pytest_config.py index 1abce106..7fb99501 100644 --- a/pytest_config.py +++ b/pytest_config.py @@ -1,4 +1,5 @@ """Configuration for testing the application.""" + from os import environ from dotenv import dotenv_values diff --git a/tests/conftest.py b/tests/conftest.py index 4f8be7aa..355749e2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ """Defines fixtures available to all tests.""" + import json import os import unittest.mock diff --git a/tests/functional/test_server_launch.py b/tests/functional/test_server_launch.py index 7121b9d6..c97fe78f 100644 --- a/tests/functional/test_server_launch.py +++ b/tests/functional/test_server_launch.py @@ -1,4 +1,5 @@ """Tests for API endpoints related to server launch""" + import json diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index 2b9ce2e6..d70988a2 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -1,4 +1,5 @@ """Tests for API endpoints related to studies""" + import json import pytest diff --git a/tests/functional/test_study_dataset_api.py b/tests/functional/test_study_dataset_api.py index c77d2562..42fe42e3 100644 --- a/tests/functional/test_study_dataset_api.py +++ b/tests/functional/test_study_dataset_api.py @@ -1,4 +1,5 @@ """Tests for API endpoints related to datasets""" + import json import pytest From 8ccd04a0ac901b68a8ec1140327868e141b4290f Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 5 Feb 2024 13:03:59 -0800 Subject: [PATCH 409/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20add=20host=20arg?= =?UTF-8?q?=20to=20app.py?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 5 ++--- app.py | 10 +++++++--- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 1821943f..71c2c9b4 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -346,7 +346,6 @@ def get(self, study_id: int): redcap_project_view = redcap_project_view.to_dict() # Set report_ids for ETL - print("reports-pre", redcap_project_dashboard["reports"]) for report in redcap_project_dashboard["reports"]: for i, report_config in enumerate(redcapTransformConfig["reports"]): if ( @@ -356,7 +355,7 @@ def get(self, study_id: int): redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( report["report_id"] ) - print("reports-post", redcap_project_dashboard["reports"]) + # Structure REDCap ETL Config redcap_etl_config = { "redcap_api_url": redcap_project_view["project_api_url"], @@ -371,7 +370,7 @@ def get(self, study_id: int): transform, module_etl_config = moduleTransformConfigs[ dashboard_module["id"] ] - print(transform) + print(transform, module_etl_config) transformed = getattr(ModuleTransform(module_etl_config), transform)( mergedTransform ).transformed diff --git a/app.py b/app.py index d360dcc0..8a13c15f 100644 --- a/app.py +++ b/app.py @@ -75,7 +75,7 @@ def create_app(config_module=None): "https://fairhub.io", ] if app.debug: - cors_origins.extend(["http://localhost:3000"]) + cors_origins.extend(["http://localhost:3000", "http://127.0.0.1:3000"]) # Only allow CORS origin for localhost:3000 # and any subdomain of azurestaticapps.net/ @@ -254,11 +254,15 @@ def destroy_schema(): parser = ArgumentParser() parser.add_argument( - "-p", "--port", default=5000, type=int, help="Port to listen on" + "-P", "--port", default=5000, type=int, help="Port to listen on" + ) + parser.add_argument( + "-H", "--host", default="0.0.0.0", type=str, help="Host" ) args = parser.parse_args() port = args.port + host = args.host flask_app = 
create_app() - serve(flask_app, port=port) + serve(flask_app, port=port, host=host) From 86a09784eb3cff17b145acee216a4714b49b2e8d Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 6 Feb 2024 13:47:18 -0800 Subject: [PATCH 410/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20fix=20module=20tr?= =?UTF-8?q?ansform=20bug=20(needed=20to=20create=20deep=20copy)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 1 + apis/dashboard.py | 56 +++-- app.py | 84 ++++--- modules/etl/config/aireadi_config.py | 250 +-------------------- modules/etl/transforms/module_transform.py | 12 +- modules/etl/vtypes/categorical.py | 2 - modules/etl/vtypes/continuous.py | 3 +- modules/etl/vtypes/discrete.py | 3 +- modules/etl/vtypes/timeseries.py | 3 - 9 files changed, 83 insertions(+), 331 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 941742a6..18610b67 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -138,6 +138,7 @@ def validate_password(instance): "email_address": data["email_address"], "password": data["password"], } + print(data_no_code) validate( instance=data_no_code, schema=schema, format_checker=format_checker ) diff --git a/apis/dashboard.py b/apis/dashboard.py index 71c2c9b4..197bae87 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -292,9 +292,9 @@ def post(self, study_id: int): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 @@ -329,6 +329,7 @@ class RedcapProjectDashboard(Resource): @cache.cached(query_string=True) def get(self, study_id: int): """Get REDCap project dashboard""" + model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) 
if is_granted("redcap_access", study): return "Access denied, you can not get this dashboard", 403 @@ -352,9 +353,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -363,21 +364,31 @@ def get(self, study_id: int): } | redcapTransformConfig redcapTransform = RedcapTransform(redcap_etl_config) - mergedTransform = redcapTransform.merged # Execute Dashboard Module Transforms for dashboard_module in redcap_project_dashboard["dashboard_modules"]: - transform, module_etl_config = moduleTransformConfigs[ - dashboard_module["id"] - ] - print(transform, module_etl_config) - transformed = getattr(ModuleTransform(module_etl_config), transform)( - mergedTransform - ).transformed - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": transformed, - } + if dashboard_module["selected"]: + mergedTransform = redcapTransform.merged + print("selected", dashboard_module["id"]) + transform, module_etl_config = moduleTransformConfigs[ + dashboard_module["id"] + ] + print("transform", transform) + print("module etl config", module_etl_config) + moduleTransform = ModuleTransform(module_etl_config) + transformed = getattr(moduleTransform, transform)( + mergedTransform + ).transformed + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": transformed, + } + else: + print("not selected", dashboard_module["id"]) + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": [], + } return redcap_project_dashboard, 201 @@ -485,14 +496,11 @@ def put(self, study_id: int): update_redcap_project_dashboard_query = ( model.StudyRedcapProjectDashboard.query.get(data["dashboard_id"]) ) - print("data-update", data) - 
print("pre-update", update_redcap_project_dashboard_query.to_dict()) update_redcap_project_dashboard_query.update(data) - print("post-update", update_redcap_project_dashboard_query.to_dict()) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - update_redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = update_redcap_project_dashboard_query.to_dict() return update_redcap_project_dashboard, 201 diff --git a/app.py b/app.py index 8a13c15f..5446fad0 100644 --- a/app.py +++ b/app.py @@ -13,20 +13,20 @@ from growthbook import GrowthBook from sqlalchemy import MetaData from waitress import serve -from caching import cache import config import model from apis import api from apis.authentication import UnauthenticatedException, authentication, authorization from apis.exception import ValidationException +from caching import cache # from pyfairdatatools import __version__ bcrypt = Bcrypt() -def create_app(config_module=None): +def create_app(config_module=None, loglevel="INFO"): """Initialize the core application.""" # create and configure the app app = Flask(__name__) @@ -35,7 +35,7 @@ def create_app(config_module=None): app.config["RESTX_MASK_SWAGGER"] = False # set up logging - logging.basicConfig(level=logging.DEBUG) + logging.basicConfig(level=getattr(logging, loglevel)) # Initialize config app.config.from_object(config_module or "config") @@ -75,7 +75,7 @@ def create_app(config_module=None): "https://fairhub.io", ] if app.debug: - cors_origins.extend(["http://localhost:3000", "http://127.0.0.1:3000"]) + cors_origins.extend(["http://localhost:3000", "https://localhost:3000"]) # Only allow CORS origin for localhost:3000 # and any subdomain of azurestaticapps.net/ @@ -103,21 +103,38 @@ def create_app(config_module=None): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, Access-Control-Allow-Credentials" # app.config["CORS_SUPPORTS_CREDENTIALS"] = True - # CORS(app, 
resources={r"/*": {"origins": "*", "send_wildcard": "True"}}) - - # - # @app.cli.command("create-schema") - # def create_schema(): - # engine = model.db.session.get_bind() - # metadata = MetaData() - # metadata = MetaData() - # metadata.reflect(bind=engine) - # table_names = [table.name for table in metadata.tables.values()] - # print(table_names) - # if len(table_names) == 0: - # with engine.begin() as conn: - # """Create the database schema.""" - # model.db.create_all() + # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": True}}) + + @app.cli.command("create-schema") + def create_schema(): + """Create the database schema.""" + engine = model.db.session.get_bind() + metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + if len(table_names) == 0: + with engine.begin(): + model.db.create_all() + + @app.cli.command("destroy-schema") + def destroy_schema(): + """Create the database schema.""" + engine = model.db.session.get_bind() + with engine.begin(): + model.db.drop_all() + + @app.cli.command("cycle-schema") + def cycle_schema(): + """Destroy then re-create the database schema.""" + engine = model.db.session.get_bind() + with engine.begin(): + model.db.drop_all() + metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + if len(table_names) == 0: + with engine.begin(): + model.db.create_all() @app.before_request def on_before_request(): # pylint: disable = inconsistent-return-statements @@ -223,29 +240,6 @@ def on_after_request(resp): def validation_exception_handler(error): return error.args[0], 422 - @app.cli.command("destroy-schema") - def destroy_schema(): - """destroy the database schema.""" - - # if db is azure, then skip - if config.FAIRHUB_DATABASE_URL.find("azure") > -1: - return - - engine = model.db.session.get_bind() - - with engine.begin(): - model.db.drop_all() - - with app.app_context(): - engine = 
model.db.session.get_bind() - metadata = MetaData() - metadata.reflect(bind=engine) - table_names = [table.name for table in metadata.tables.values()] - - # The alembic table is created by default, so we need to check for more than 1 table - if len(table_names) <= 1: - with engine.begin(): - model.db.create_all() return app @@ -256,13 +250,15 @@ def destroy_schema(): parser.add_argument( "-P", "--port", default=5000, type=int, help="Port to listen on" ) + parser.add_argument("-H", "--host", default="0.0.0.0", type=str, help="Host") parser.add_argument( - "-H", "--host", default="0.0.0.0", type=str, help="Host" + "-L", "--loglevel", default="INFO", type=str, help="Logging level" ) args = parser.parse_args() port = args.port host = args.host + loglevel = args.loglevel - flask_app = create_app() + flask_app = create_app(loglevel=loglevel) serve(flask_app, port=port, host=host) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index fc1c0619..c4678e13 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -326,12 +326,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Recruitment Survey", - # "field": "recruitment_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -370,12 +364,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "FAQ Survey", - # "field": "faq_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -414,12 +402,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Screening Survey", - # "field": "screening_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -458,12 +440,6 @@ "missing_value": 
missing_value_generic, "astype": str, }, - # "color": { - # "name": "Preconsent Survey", - # "field": "preconsent_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -502,12 +478,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Consent Survey", - # "field": "consent_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -549,12 +519,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Staff Consent Attestation Survey", - # "field": "staff_consent_attestation_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -593,12 +557,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Demographics Survey", - # "field": "demographics_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -637,12 +595,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Health Survey", - # "field": "health_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -681,12 +633,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Substance Use Survey", - # "field": "substance_use_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -725,12 +671,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "CES-D-10 Survey", - # "field": "cesd10_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { 
"name": "Count (N)", "field": "record_id", @@ -769,12 +709,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "PAID-5 DM Survey", - # "field": "paid5_dm_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -813,12 +747,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Diabetes Survey", - # "field": "diabetes_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -857,12 +785,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Dietary Survey", - # "field": "dietary_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -901,12 +823,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Opthalmic Survey", - # "field": "ophthalmic_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -945,12 +861,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "PhenX SDOH Combined Survey", - # "field": "px_sdoh_combined_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -989,12 +899,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "PhenX Food Insecurity Survey", - # "field": "px_food_insecurity_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1036,12 +940,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "PhenX Neighborhood Environment Survey", - # "field": 
"px_neighborhood_environment_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1083,12 +981,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "PhenX Racial and Ethnic Discrimination Survey", - # "field": "px_racial_ethnic_discrimination_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1127,12 +1019,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Decline Participation Survey", - # "field": "decline_participation_survey_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1171,12 +1057,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Study Enrollment Survey", - # "field": "study_enrollment_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1215,12 +1095,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Driving Record", - # "field": "driving_record_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1259,12 +1133,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Device Distribution", - # "field": "device_distribution_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1303,12 +1171,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Medications Assessment", - # "field": "meds_assessment_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count 
(N)", "field": "record_id", @@ -1347,12 +1209,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Physical Assessment", - # "field": "physical_assessment_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1391,12 +1247,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "BCVA", - # "field": "bcva_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1435,12 +1285,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Photopic MARS", - # "field": "photopic_mars_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1479,12 +1323,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Mesopic MARS", - # "field": "mesopic_mars_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1523,12 +1361,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Monofilament", - # "field": "monofilament_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1567,12 +1399,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "MOCA", - # "field": "moca_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1611,12 +1437,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "ECG Survey", - # "field": "ecg_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": 
"record_id", @@ -1655,12 +1475,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Lab Results Survey", - # "field": "lab_results_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1699,12 +1513,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Specimen Management", - # "field": "specimen_management_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1743,12 +1551,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Device Return", - # "field": "device_return_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1787,12 +1589,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Disposition Survey", - # "field": "disposition_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1831,12 +1627,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Data Management Survey", - # "field": "data_management_complete", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -1879,12 +1669,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Race", - # "field": "race", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "x": { "name": "Week of the Year", "field": "scrweek", @@ -1933,12 +1717,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Phenotype", - # "field": "phenotypes", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "x": { "name": "Week of the Year", 
"field": "scrweek", @@ -1992,12 +1770,6 @@ "missing_value": missing_value_generic, "astype": str, }, - # "color": { - # "name": "Race", - # "field": "race", - # "missing_value": missing_value_generic, - # "astype": str, - # }, "value": { "name": "Count (N)", "field": "record_id", @@ -2045,12 +1817,6 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, "value": { "name": "Count (N)", "field": "record_id", @@ -2098,12 +1864,6 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Race", - "field": "race", - "missing_value": missing_value_generic, - "astype": str, - }, "value": { "name": "Count (N)", "field": "record_id", @@ -2151,12 +1911,6 @@ "missing_value": missing_value_generic, "astype": str, }, - "color": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, "value": { "name": "Participants (N)", "field": "record_id", @@ -2171,10 +1925,10 @@ moduleTransformConfigs: Dict[str, Any] = { "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, + "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, + "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, "race-sex-by-site": raceSexBySiteTransformConfig, - "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, - "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, "current-medications-by-site": currentMedicationsBySiteTransformConfig, } diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index 3c0d29a6..3725b8fe 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -1,7 +1,7 @@ # Library Modules from typing 
import Any, Callable, Union, List, Dict, Tuple from datetime import datetime -import logging, re +import logging, re, copy import modules.etl.vtypes as vtypes # Third-Party Modules @@ -49,17 +49,17 @@ def __init__( self.key = config["key"] if "key" in config else None - self.transforms: List[Dict[str, Any]] = config["transforms"] + self.transforms: List[Dict[str, Any]] = copy.deepcopy(config["transforms"]) - if len(self.transforms) < 1: + if type(self.transforms) != list: self.valid = False raise ValueError( - f"ModuleTransform instantiation missing transforms argument" + f"ModuleTransform argument transforms in config must be a list or dict type" ) - elif type(self.transforms) != list: + elif len(self.transforms) < 1: self.valid = False raise ValueError( - f"ModuleTransform argument transforms must be a list or dict type" + f"ModuleTransform instantiation missing transforms in config argument" ) else: # Transform attribute is there and has one of the correct types (list, dict) diff --git a/modules/etl/vtypes/categorical.py b/modules/etl/vtypes/categorical.py index 3415bcc4..f4e45ff6 100644 --- a/modules/etl/vtypes/categorical.py +++ b/modules/etl/vtypes/categorical.py @@ -8,7 +8,6 @@ def __init__(self) -> None: [ ("filterby", str), ("group", str), - ("color", str), ("value", int), ], str, @@ -23,7 +22,6 @@ def __init__(self) -> None: ("filterby", str), ("group", str), ("subgroup", str), - ("color", str), ("value", int), ], str, diff --git a/modules/etl/vtypes/continuous.py b/modules/etl/vtypes/continuous.py index 607745ae..b3301480 100644 --- a/modules/etl/vtypes/continuous.py +++ b/modules/etl/vtypes/continuous.py @@ -5,7 +5,7 @@ class SingleContinuous(SimpleVType): def __init__(self) -> None: super(SingleContinuous, self).__init__( "SingleContinuous", - [("filterby", str), ("group", str), ("color", str), ("x", float)], + [("filterby", str), ("group", str), ("x", float)], float, ) @@ -17,7 +17,6 @@ def __init__(self) -> None: [ ("filterby", str), ("group", str), 
- ("color", str), ("x", float), ("y", float), ], diff --git a/modules/etl/vtypes/discrete.py b/modules/etl/vtypes/discrete.py index ec32407f..02bb5350 100644 --- a/modules/etl/vtypes/discrete.py +++ b/modules/etl/vtypes/discrete.py @@ -5,7 +5,7 @@ class SingleDiscrete(SimpleVType): def __init__(self) -> None: super(SingleDiscrete, self).__init__( "SingleDiscrete", - [("filterby", str), ("group", str), ("color", str), ("x", int)], + [("filterby", str), ("group", str), ("x", int)], int, ) @@ -17,7 +17,6 @@ def __init__(self) -> None: [ ("filterby", str), ("group", str), - ("color", str), ("x", int), ("y", int), ], diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index 4a2e169e..a3eafddb 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -10,7 +10,6 @@ def __init__(self) -> None: [ ("filterby", str), ("subgroup", str), - ("color", str), ("datetime", datetime), ], pd._libs.tslibs.nattype.NaTType, @@ -24,7 +23,6 @@ def __init__(self) -> None: [ ("filterby", str), ("subgroup", str), - ("color", str), ("datetime", str), ("y", int), ], @@ -39,7 +37,6 @@ def __init__(self) -> None: [ ("filterby", str), ("subgroup", str), - ("color", str), ("datetime", str), ("y", float), ], From c147960c4041f96d5ce381fb073332bcf6608e0a Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 9 Feb 2024 12:13:52 -0800 Subject: [PATCH 411/505] =?UTF-8?q?=E2=9C=A8feat:=20improved=20cache=20man?= =?UTF-8?q?agement?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 165 ++++++++++++++++++++++---------------- cache-docker-compose.yaml | 2 +- 2 files changed, 96 insertions(+), 71 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 197bae87..393616c9 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -7,6 +7,7 @@ # from flask_caching import Cache from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, 
validate +from redcap import RedcapError import model from caching import cache @@ -309,14 +310,17 @@ def get(self, study_id: int): study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not get this dashboard", 403 + + # Get Dashboard Connector dashboard_id = dashboard_parser.parse_args()["dashboard_id"] - # Get Dashboard - redcap_project_dashboard_connector: Any = model.db.session.query( + redcap_project_dashboard_connector_query: Any = model.db.session.query( model.StudyRedcapProjectDashboard ).get(dashboard_id) - redcap_project_dashboard_connector = ( - redcap_project_dashboard_connector.to_dict() - ) + + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() + return redcap_project_dashboard_connector, 201 @@ -326,71 +330,83 @@ class RedcapProjectDashboard(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) - @cache.cached(query_string=True) def get(self, study_id: int): """Get REDCap project dashboard""" model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not get this dashboard", 403 - dashboard_id = dashboard_parser.parse_args()["dashboard_id"] + # Get Dashboard - redcap_project_dashboard: Any = model.db.session.query( - model.StudyRedcapProjectDashboard - ).get(dashboard_id) - redcap_project_dashboard = redcap_project_dashboard.to_dict() - # Get REDCap Project - project_id = redcap_project_dashboard["project_id"] - redcap_project_view: Any = model.db.session.query( - model.StudyRedcapProjectApi - ).get(project_id) - redcap_project_view = redcap_project_view.to_dict() - - # Set report_ids for ETL - for report in redcap_project_dashboard["reports"]: - for i, report_config in enumerate(redcapTransformConfig["reports"]): - if ( - report["report_key"] == 
report_config["key"] - and len(report["report_id"]) > 0 - ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] - - # Structure REDCap ETL Config - redcap_etl_config = { - "redcap_api_url": redcap_project_view["project_api_url"], - "redcap_api_key": redcap_project_view["project_api_key"], - } | redcapTransformConfig - - redcapTransform = RedcapTransform(redcap_etl_config) - - # Execute Dashboard Module Transforms - for dashboard_module in redcap_project_dashboard["dashboard_modules"]: - if dashboard_module["selected"]: - mergedTransform = redcapTransform.merged - print("selected", dashboard_module["id"]) - transform, module_etl_config = moduleTransformConfigs[ - dashboard_module["id"] - ] - print("transform", transform) - print("module etl config", module_etl_config) - moduleTransform = ModuleTransform(module_etl_config) - transformed = getattr(moduleTransform, transform)( - mergedTransform - ).transformed - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": transformed, - } - else: - print("not selected", dashboard_module["id"]) - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": [], - } - - return redcap_project_dashboard, 201 + dashboard_id = dashboard_parser.parse_args()["dashboard_id"] + + # Retrieve Dashboard Redis Cache + cached_redcap_project_dashboard = cache.get(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") + + if cached_redcap_project_dashboard is not None: + + return cached_redcap_project_dashboard, 201 + + else: + + redcap_project_dashboard_query: Any = model.db.session.query( + model.StudyRedcapProjectDashboard + ).get(dashboard_id) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() + + # Get REDCap Project + project_id = redcap_project_dashboard["project_id"] + redcap_project_view_query: Any = model.db.session.query( + model.StudyRedcapProjectApi + ).get(project_id) + redcap_project_view: Dict[str, Any] = 
redcap_project_view_query.to_dict() + + # Set report_ids for ETL + for report in redcap_project_dashboard["reports"]: + for i, report_config in enumerate(redcapTransformConfig["reports"]): + if ( + report["report_key"] == report_config["key"] + and len(report["report_id"]) > 0 + ): + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] + + # Structure REDCap ETL Config + redcap_etl_config = { + "redcap_api_url": redcap_project_view["project_api_url"], + "redcap_api_key": redcap_project_view["project_api_key"], + } | redcapTransformConfig + + redcapTransform = RedcapTransform(redcap_etl_config) + + # Execute Dashboard Module Transforms + for dashboard_module in redcap_project_dashboard["dashboard_modules"]: + if dashboard_module["selected"]: + mergedTransform = redcapTransform.merged + transform, module_etl_config = moduleTransformConfigs[ + dashboard_module["id"] + ] + moduleTransform = ModuleTransform(module_etl_config) + transformed = getattr(moduleTransform, transform)( + mergedTransform + ).transformed + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": transformed, + } + else: + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": [], + } + + # Create Dashboard Redis Cache + cache.set(f"$study_id#{study_id}$dashboard_id#{dashboard_id}", redcap_project_dashboard) + + return redcap_project_dashboard, 201 @api.route("/study//dashboard/edit") @@ -490,17 +506,24 @@ def put(self, study_id: int): {data['dashboard_name']}""", 400, ) - # Clear Redis Cache - # TODO: We want to clear the cache by dashboard_id/cache key, not the whole cache! 
- cache.clear() - update_redcap_project_dashboard_query = ( - model.StudyRedcapProjectDashboard.query.get(data["dashboard_id"]) + + dashboard_id = data["dashboard_id"] + + redcap_project_dashboard_query = model.StudyRedcapProjectDashboard.query.get( + dashboard_id ) - update_redcap_project_dashboard_query.update(data) + if redcap_project_dashboard_query is None: + return "An error occurred while updating the dashboard", 500 + + redcap_project_dashboard_query.update(data) model.db.session.commit() update_redcap_project_dashboard: Dict[ str, Any - ] = update_redcap_project_dashboard_query.to_dict() + ] = redcap_project_dashboard_query.to_dict() + + # Clear Dashboard from Redis Cache + cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") + return update_redcap_project_dashboard, 201 @@ -515,9 +538,11 @@ def delete(self, study_id: int): study = model.Study.query.get(study_id) if is_granted("redcap_access", study): return "Access denied, you can not delete this redcap project", 403 + dashboard_id = dashboard_parser.parse_args()["dashboard_id"] model.StudyRedcapProjectDashboard.query.filter_by( dashboard_id=dashboard_id ).delete() model.db.session.commit() + return 204 diff --git a/cache-docker-compose.yaml b/cache-docker-compose.yaml index ff06355a..10445f97 100644 --- a/cache-docker-compose.yaml +++ b/cache-docker-compose.yaml @@ -8,7 +8,7 @@ services: CACHE_HOST: localhost CACHE_PORT: 6379 CACHE_URL: redis://127.0.0.1:6379 - CACHE_KEY_PREFIX: fairhub-io# + CACHE_KEY_PREFIX: $fairhub-io CACHE_TIMEOUT: 86400 ports: - '6379:6379' From 7641482a3860f39c7c956277eeb058782e11b74d Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 9 Feb 2024 15:31:17 -0800 Subject: [PATCH 412/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 127 +++++++++++++++++++++++----------------------- 1 file changed, 64 insertions(+), 63 deletions(-) diff --git 
a/apis/dashboard.py b/apis/dashboard.py index 393616c9..82c21a98 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -7,7 +7,6 @@ # from flask_caching import Cache from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate -from redcap import RedcapError import model from caching import cache @@ -341,72 +340,74 @@ def get(self, study_id: int): dashboard_id = dashboard_parser.parse_args()["dashboard_id"] # Retrieve Dashboard Redis Cache - cached_redcap_project_dashboard = cache.get(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") + cached_redcap_project_dashboard = cache.get( + f"$study_id#{study_id}$dashboard_id#{dashboard_id}" + ) if cached_redcap_project_dashboard is not None: - return cached_redcap_project_dashboard, 201 - else: - - redcap_project_dashboard_query: Any = model.db.session.query( - model.StudyRedcapProjectDashboard - ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() - - # Get REDCap Project - project_id = redcap_project_dashboard["project_id"] - redcap_project_view_query: Any = model.db.session.query( - model.StudyRedcapProjectApi - ).get(project_id) - redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() - - # Set report_ids for ETL - for report in redcap_project_dashboard["reports"]: - for i, report_config in enumerate(redcapTransformConfig["reports"]): - if ( - report["report_key"] == report_config["key"] - and len(report["report_id"]) > 0 - ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] - - # Structure REDCap ETL Config - redcap_etl_config = { - "redcap_api_url": redcap_project_view["project_api_url"], - "redcap_api_key": redcap_project_view["project_api_key"], - } | redcapTransformConfig - - redcapTransform = RedcapTransform(redcap_etl_config) - - # Execute Dashboard Module Transforms - for dashboard_module in redcap_project_dashboard["dashboard_modules"]: 
- if dashboard_module["selected"]: - mergedTransform = redcapTransform.merged - transform, module_etl_config = moduleTransformConfigs[ - dashboard_module["id"] - ] - moduleTransform = ModuleTransform(module_etl_config) - transformed = getattr(moduleTransform, transform)( - mergedTransform - ).transformed - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": transformed, - } - else: - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": [], - } - - # Create Dashboard Redis Cache - cache.set(f"$study_id#{study_id}$dashboard_id#{dashboard_id}", redcap_project_dashboard) - - return redcap_project_dashboard, 201 + redcap_project_dashboard_query: Any = model.db.session.query( + model.StudyRedcapProjectDashboard + ).get(dashboard_id) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() + + # Get REDCap Project + project_id = redcap_project_dashboard["project_id"] + redcap_project_view_query: Any = model.db.session.query( + model.StudyRedcapProjectApi + ).get(project_id) + redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() + + # Set report_ids for ETL + for report in redcap_project_dashboard["reports"]: + for i, report_config in enumerate(redcapTransformConfig["reports"]): + if ( + report["report_key"] == report_config["key"] + and len(report["report_id"]) > 0 + ): + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] + + # Structure REDCap ETL Config + redcap_etl_config = { + "redcap_api_url": redcap_project_view["project_api_url"], + "redcap_api_key": redcap_project_view["project_api_key"], + } | redcapTransformConfig + + redcapTransform = RedcapTransform(redcap_etl_config) + + # Execute Dashboard Module Transforms + for dashboard_module in redcap_project_dashboard["dashboard_modules"]: + if dashboard_module["selected"]: + mergedTransform = redcapTransform.merged + transform, module_etl_config = moduleTransformConfigs[ 
+ dashboard_module["id"] + ] + moduleTransform = ModuleTransform(module_etl_config) + transformed = getattr(moduleTransform, transform)( + mergedTransform + ).transformed + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": transformed, + } + else: + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": [], + } + + # Create Dashboard Redis Cache + cache.set( + f"$study_id#{study_id}$dashboard_id#{dashboard_id}", + redcap_project_dashboard, + ) + + return redcap_project_dashboard, 201 @api.route("/study//dashboard/edit") From cde2287f7688e459c72f07bff2f61ba18c931849 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 13 Feb 2024 00:04:35 +0000 Subject: [PATCH 413/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 82c21a98..44986bed 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -292,9 +292,9 @@ def post(self, study_id: int): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[ - str, Any - ] = connect_redcap_project_dashboard_data.to_dict() + connect_redcap_project_dashboard: Dict[str, Any] = ( + connect_redcap_project_dashboard_data.to_dict() + ) return connect_redcap_project_dashboard, 201 @@ -316,9 +316,9 @@ def get(self, study_id: int): model.StudyRedcapProjectDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[ - str, Any - ] = redcap_project_dashboard_connector_query.to_dict() + redcap_project_dashboard_connector: Dict[str, Any] = ( + redcap_project_dashboard_connector_query.to_dict() + ) return redcap_project_dashboard_connector, 201 @@ -350,9 +350,9 @@ def get(self, study_id: int): 
redcap_project_dashboard_query: Any = model.db.session.query( model.StudyRedcapProjectDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Get REDCap Project project_id = redcap_project_dashboard["project_id"] @@ -368,9 +368,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( + report["report_id"] + ) # Structure REDCap ETL Config redcap_etl_config = { @@ -518,9 +518,9 @@ def put(self, study_id: int): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + update_redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From bf42b45e34e7987b2a5524e086239368132e640f Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 12 Feb 2024 16:22:36 -0800 Subject: [PATCH 414/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20remove=20https://?= =?UTF-8?q?localhost:3000from=20CORS=20in=20debug=20mode?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 5446fad0..6dca4472 100644 --- a/app.py +++ b/app.py @@ -75,7 +75,7 @@ def create_app(config_module=None, loglevel="INFO"): "https://fairhub.io", ] if app.debug: - cors_origins.extend(["http://localhost:3000", "https://localhost:3000"]) + cors_origins.extend(["http://localhost:3000"]) # Only allow CORS origin for localhost:3000 # and any subdomain of azurestaticapps.net/ From 
f31e2c6a094a5769ba287b7225ff680e0b0fda1b Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 12 Feb 2024 16:55:43 -0800 Subject: [PATCH 415/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20import=20error?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 60 +++++++++++++++++++++++++++++++++++++++----------- pyproject.toml | 4 ++++ 2 files changed, 51 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index f5472891..75c762c1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -407,6 +407,18 @@ files = [ {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, ] +[[package]] +name = "cachelib" +version = "0.9.0" +description = "A collection of cache libraries in the same API interface." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, + {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, +] + [[package]] name = "certifi" version = "2023.11.17" @@ -1005,6 +1017,22 @@ files = [ bcrypt = ">=3.1.1" Flask = "*" +[[package]] +name = "flask-caching" +version = "2.1.0" +description = "Adds caching support to Flask applications." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Flask-Caching-2.1.0.tar.gz", hash = "sha256:b7500c145135836a952e3de3a80881d9654e327a29c852c9265607f5c449235c"}, + {file = "Flask_Caching-2.1.0-py3-none-any.whl", hash = "sha256:f02645a629a8c89800d96dc8f690a574a0d49dcd66c7536badc6d362ba46b716"}, +] + +[package.dependencies] +cachelib = ">=0.9.0,<0.10.0" +Flask = "*" + [[package]] name = "flask-cors" version = "4.0.0" @@ -1912,16 +1940,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2479,8 +2497,6 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", 
hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -3406,6 +3422,24 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3-binary"] +[[package]] +name = "sqlalchemy-json" +version = "0.7.0" +description = "JSON type with nested change tracking for SQLAlchemy" +category = "main" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "sqlalchemy-json-0.7.0.tar.gz", hash = "sha256:620d0b26f648f21a8fa9127df66f55f83a5ab4ae010e5397a5c6989a08238561"}, + {file = "sqlalchemy_json-0.7.0-py3-none-any.whl", hash = "sha256:27881d662ca18363a4ac28175cc47ea2a6f2bef997ae1159c151026b741818e6"}, +] + +[package.dependencies] +sqlalchemy = ">=0.7" + +[package.extras] +dev = ["pytest"] + [[package]] name = "stack-data" version = "0.6.3" @@ -3882,4 +3916,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8.16" -content-hash = "fa8a89016f3b4476727c90453b9f147aac223c5530e4fc2683f2ed17b72f8f27" +content-hash = "71dbb3c0b1376753f652fd87d52c1b72d77d40e3e93adf5fa8b2ef929870647c" diff --git a/pyproject.toml b/pyproject.toml index 4ad5329f..b94848e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,9 +45,13 @@ requests = "^2.31.0" # Database flask-sqlalchemy = "^3.0.5" +sqlalchemy-json = "^0.7.0" alembic = "^1.12.1" psycopg2 = "^2.9.6" +# Caching +flask-caching = "^2.1.0" + # Auth flask-bcrypt = "^1.0.1" pyjwt = "^2.8.0" From ce7494221f0c601c63c03fd9a4039253e573b1a4 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 12 Feb 2024 17:10:01 -0800 Subject: [PATCH 416/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20import=20error,?= =?UTF-8?q?=20upgrade=20to=20Python3.10?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 205 +++++++++++++++++++++++++++++++++++++++---------- pyproject.toml | 7 +- 2 files changed, 172 insertions(+), 40 deletions(-) diff --git 
a/poetry.lock b/poetry.lock index 75c762c1..dff9288d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,8 +13,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} Mako = "*" SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" @@ -220,6 +218,18 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + [[package]] name = "attrs" version = "23.1.0" @@ -251,9 +261,6 @@ files = [ {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, ] -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] @@ -992,7 +999,6 @@ files = [ [package.dependencies] blinker = ">=1.6.2" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" Werkzeug = ">=2.3.7" @@ -1254,7 +1260,7 @@ files = [ name = "importlib-metadata" version = "7.0.0" description = "Read metadata from Python packages" -category = "main" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1282,9 +1288,6 @@ files = [ {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, ] -[package.dependencies] -zipp = {version = ">=3.1.0", 
markers = "python_version < \"3.10\""} - [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] @@ -1360,7 +1363,6 @@ prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] @@ -1520,11 +1522,9 @@ files = [ attrs = ">=22.2.0" fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} jsonschema-specifications = ">=2023.03.6" -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} referencing = ">=0.28.4" rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} @@ -1549,7 +1549,6 @@ files = [ ] [package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} referencing = ">=0.31.0" [[package]] @@ -1586,7 +1585,6 @@ files = [ ] 
[package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" @@ -1682,7 +1680,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-server = ">=1.1.2" [[package]] @@ -1756,8 +1753,6 @@ files = [ [package.dependencies] async-lru = ">=1.0.0" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} -importlib-resources = {version = ">=1.4", markers = "python_version < \"3.9\""} ipykernel = "*" jinja2 = ">=3.0.3" jupyter-core = "*" @@ -1802,7 +1797,6 @@ files = [ [package.dependencies] babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jinja2 = ">=3.0.3" json5 = ">=0.9.0" jsonschema = ">=4.18.0" @@ -1906,9 +1900,6 @@ files = [ {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, ] -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - [package.extras] testing = ["coverage", "pyyaml"] @@ -2161,7 +2152,6 @@ files = [ beautifulsoup4 = "*" bleach = "!=5.0.0" defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" @@ -2260,6 +2250,52 @@ jupyter-server = ">=1.8,<3" [package.extras] test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = 
"sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + [[package]] name = "overrides" version = "7.4.0" @@ -2284,6 +2320,79 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pandas" +version = "2.2.0" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, + {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, +] 
+ +[package.dependencies] +numpy = [ + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck 
(>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pandocfilters" version = "1.5.0" @@ -2363,18 +2472,6 @@ files = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - [[package]] name = "platformdirs" version = "4.1.0" @@ -2668,7 +2765,6 @@ mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] spelling = ["pyenchant (>=3.2,<4.0)"] @@ -3095,6 +3191,25 @@ packaging = "*" [package.extras] test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] +[[package]] +name = "redis" +version = "5.0.1" +description = "Python client for Redis database and key-value store" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, + {file = "redis-5.0.1.tar.gz", hash = 
"sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + [[package]] name = "referencing" version = "0.32.0" @@ -3625,6 +3740,18 @@ files = [ {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "uri-template" version = "1.3.0" @@ -3901,7 +4028,7 @@ files = [ name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3915,5 +4042,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -python-versions = "^3.8.16" -content-hash = "71dbb3c0b1376753f652fd87d52c1b72d77d40e3e93adf5fa8b2ef929870647c" +python-versions = "^3.10" +content-hash = "19c2a1eb80debc4cb95aea2403524e3c6218008e82761e79ca2b4fbdf9d5b419" diff --git a/pyproject.toml b/pyproject.toml index b94848e9..9c4848ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ classifiers = [ [tool.poetry.dependencies] -python = "^3.8.16" +python = "^3.10" # Core Flask = "^2.3.2" @@ -51,6 +51,7 @@ psycopg2 = "^2.9.6" # Caching flask-caching = "^2.1.0" +redis = "^5.0.1" # Auth flask-bcrypt = "^1.0.1" @@ -72,6 +73,10 @@ growthbook 
= "^1.0.0" # Fair Data Tools pyfairdatatools = "0.1.3" +# Dashboard ETL +pandas = "^2.2.0" +numpy = "^1.26.4" + [tool.poetry.group.dev.dependencies] # Environment From ef0efeafecac7234212e2b3fa7df790d386f7df6 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Mon, 12 Feb 2024 17:25:42 -0800 Subject: [PATCH 417/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20import=20error=20?= =?UTF-8?q?-=20PyCap?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- poetry.lock | 37 ++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index dff9288d..377b92ed 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2630,6 +2630,25 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "pycap" +version = "2.6.0" +description = "PyCap: Python interface to REDCap" +category = "main" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pycap-2.6.0-py3-none-any.whl", hash = "sha256:404a7ba299fa57f0fcadd9f4b6df123e593deda1dcb12b341f39b416b6e83d6b"}, + {file = "pycap-2.6.0.tar.gz", hash = "sha256:68d7403bf573b03ae24cb252fb1e5f73fe365b6c9d54c46199014edaffcc8f94"}, +] + +[package.dependencies] +requests = ">=2.20,<3.0" +semantic-version = ">=2.8.5,<3.0.0" + +[package.extras] +data-science = ["pandas (>=1.3.4,<2.0.0)"] + [[package]] name = "pycodestyle" version = "2.11.1" @@ -3384,6 +3403,22 @@ files = [ {file = "rpds_py-0.15.2.tar.gz", hash = "sha256:373b76eeb79e8c14f6d82cb1d4d5293f9e4059baec6c1b16dca7ad13b6131b39"}, ] +[[package]] +name = "semantic-version" +version = "2.10.0" +description = "A library implementing the 'SemVer' scheme." 
+category = "main" +optional = false +python-versions = ">=2.7" +files = [ + {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, + {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, +] + +[package.extras] +dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", "readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"] +doc = ["Sphinx", "sphinx-rtd-theme"] + [[package]] name = "send2trash" version = "1.8.2" @@ -4043,4 +4078,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "19c2a1eb80debc4cb95aea2403524e3c6218008e82761e79ca2b4fbdf9d5b419" +content-hash = "345790662c5bcdbe8b1e4ab4239ab35b3a528be2b0c46db2d2dcdc0276e26cf7" diff --git a/pyproject.toml b/pyproject.toml index 9c4848ed..988b3077 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,6 +76,7 @@ pyfairdatatools = "0.1.3" # Dashboard ETL pandas = "^2.2.0" numpy = "^1.26.4" +pycap = "^2.6.0" [tool.poetry.group.dev.dependencies] From 5cf1ecd7cb646f9b051eb3dbe6cbd96a0cc9b99e Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 13 Feb 2024 12:13:20 -0800 Subject: [PATCH 418/505] =?UTF-8?q?=E2=9C=A8feat:=20redcap=20+=20dashboard?= =?UTF-8?q?=20auth=20management?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 35 +++++++++++++++++------------------ apis/dashboard.py | 13 ++++++------- apis/redcap.py | 10 +++++----- 3 files changed, 28 insertions(+), 30 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 18610b67..75429570 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -1,7 +1,6 @@ """This module is used to authenticate users to the system and handle few authentication 
errors. Also, it sets token for logged user along with expiration date""" - import datetime import importlib import os @@ -79,6 +78,7 @@ def validate_is_valid_email(instance): email_address = instance try: validate_email(email_address, check_deliverability=False) + return True except EmailNotValidError as e: raise ValidationError("Invalid email address format") from e @@ -115,8 +115,7 @@ def validate_password(instance): # Schema validation schema = { "type": "object", - # "required": ["email_address", "password", "code"], - "required": ["email_address", "password"], + "required": ["email_address", "password", "code"], "additionalProperties": False, "properties": { "email_address": {"type": "string", "format": "valid_email"}, @@ -124,7 +123,7 @@ def validate_password(instance): "type": "string", "format": "password", }, - # "code": {"type": "string"}, + "code": {"type": "string"}, }, } @@ -133,18 +132,8 @@ def validate_password(instance): format_checker.checks("password")(validate_password) try: - # Remove the code property for dev purposes - data_no_code = { - "email_address": data["email_address"], - "password": data["password"], - } - print(data_no_code) - validate( - instance=data_no_code, schema=schema, format_checker=format_checker - ) - # validate(instance=data, schema=schema, format_checker=format_checker) + validate(instance=data, schema=schema, format_checker=format_checker) except ValidationError as e: - print(e) return e.message, 400 user = model.User.query.filter_by( @@ -206,6 +195,7 @@ def validate_is_valid_email(instance): format_checker = FormatChecker() format_checker.checks("valid email")(validate_is_valid_email) + try: validate(instance=data, schema=schema, format_checker=format_checker) except ValidationError as e: @@ -332,8 +322,13 @@ def is_granted(permission: str, study=None): "participant", "study_metadata", "dataset_metadata", + "add_redcap", + "update_redcap", + "delete_redcap", + "add_dashboard", + "update_dashboard", + "delete_dashboard", 
"make_owner", - # "redcap_access", ], "admin": [ "admin", @@ -352,7 +347,12 @@ def is_granted(permission: str, study=None): "participant", "study_metadata", "dataset_metadata", - # "redcap_access", + "add_redcap", + "update_redcap", + "delete_redcap", + "add_dashboard", + "update_dashboard", + "delete_delete", ], "editor": [ "editor", @@ -366,7 +366,6 @@ def is_granted(permission: str, study=None): "study_metadata", "version", "dataset_metadata", - # "redcap_access", ], "viewer": ["viewer", "view"], } diff --git a/apis/dashboard.py b/apis/dashboard.py index 44986bed..9f9c3a42 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -24,7 +24,6 @@ required=True, readonly=True, description="Filterby field" ), "group": fields.String(required=True, readonly=True, description="Group field"), - # "color": fields.String(required=True, readonly=True, description="Color field"), "subgroup": fields.String( required=False, readonly=True, description="Subgroup field" ), @@ -186,7 +185,7 @@ class RedcapProjectDashboards(Resource): def get(self, study_id: int): """Get all REDCap project dashboards""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("redcap_access", study): + if is_granted("viewer", study): return "Access denied, you can not modify", 403 redcap_project_dashboards_query = ( model.StudyRedcapProjectDashboard.query.filter_by(study=study) @@ -207,7 +206,7 @@ class AddRedcapProjectDashboard(Resource): def post(self, study_id: int): """Create REDCap project dashboard""" study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): + if is_granted("add_dashboard", study): return "Access denied, you can not modify", 403 # Schema validation schema = { @@ -307,7 +306,7 @@ class RedcapProjectDashboardConnector(Resource): def get(self, study_id: int): """Get REDCap project dashboard connector""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("redcap_access", study): + if is_granted("viewer", study): 
return "Access denied, you can not get this dashboard", 403 # Get Dashboard Connector @@ -333,7 +332,7 @@ def get(self, study_id: int): """Get REDCap project dashboard""" model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) - if is_granted("redcap_access", study): + if is_granted("viewer", study): return "Access denied, you can not get this dashboard", 403 # Get Dashboard @@ -419,7 +418,7 @@ class EditRedcapProjectDashboard(Resource): def put(self, study_id: int): """Update REDCap project dashboard""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("redcap_access", study): + if is_granted("update_dashboard", study): return "Access denied, you can not modify this dashboard", 403 # Schema validation schema = { @@ -537,7 +536,7 @@ class DeleteRedcapProjectDashboard(Resource): def delete(self, study_id: int): """Delete REDCap project dashboard""" study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): + if is_granted("delete_dashboard", study): return "Access denied, you can not delete this redcap project", 403 dashboard_id = dashboard_parser.parse_args()["dashboard_id"] diff --git a/apis/redcap.py b/apis/redcap.py index a3c04fa4..c1551e1b 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -66,7 +66,7 @@ class RedcapProjectAPIs(Resource): def get(self, study_id: int): """Get all REDCap project API links""" study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): + if is_granted("viewer", study): return ( "Access denied, you can not view the redcap projects for this study", 403, @@ -87,7 +87,7 @@ class AddRedcapProjectAPI(Resource): def post(self, study_id: int): """Create REDCap project API link""" study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): + if is_granted("add_redcap", study): return "Access denied, you can not create a redcap project", 403 # Schema validation data: Union[Any, dict] = request.json @@ -162,7 +162,7 @@ class 
RedcapProjectAPI(Resource): def get(self, study_id: int): """Get REDCap project API link""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("redcap_access", study): + if is_granted("viewer", study): return "Access denied, you can not get this redcap project", 403 project_id = project_parser.parse_args()["project_id"] redcap_project_view: Any = model.db.session.query( @@ -181,7 +181,7 @@ class EditRedcapProjectAPI(Resource): def put(self, study_id: int): """Update REDCap project API link""" study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): + if is_granted("update_redcap", study): return "Access denied, you can not modify this redcap project", 403 # Schema validation data: Union[Any, dict] = request.json @@ -248,7 +248,7 @@ class DeleteRedcapProjectAPI(Resource): def delete(self, study_id: int): """Delete REDCap project API link""" study = model.Study.query.get(study_id) - if is_granted("redcap_access", study): + if is_granted("delete_redcap", study): return "Access denied, you can not delete this redcap project", 403 project_id = project_parser.parse_args()["project_id"] model.StudyRedcapProjectApi.query.filter_by(project_id=project_id).delete() From 05db29b58ee492f95178229e90ab610867e6fc12 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 13 Feb 2024 20:13:49 +0000 Subject: [PATCH 419/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apis/authentication.py b/apis/authentication.py index 75429570..2200e07f 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -1,6 +1,7 @@ """This module is used to authenticate users to the system and handle few authentication errors. 
Also, it sets token for logged user along with expiration date""" + import datetime import importlib import os From 7c69aadb0c6504726f4e9e3d041edaee69062635 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 13 Feb 2024 12:20:44 -0800 Subject: [PATCH 420/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 9f9c3a42..53410747 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -291,9 +291,9 @@ def post(self, study_id: int): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 @@ -315,9 +315,9 @@ def get(self, study_id: int): model.StudyRedcapProjectDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[str, Any] = ( - redcap_project_dashboard_connector_query.to_dict() - ) + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() return redcap_project_dashboard_connector, 201 @@ -349,9 +349,9 @@ def get(self, study_id: int): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyRedcapProjectDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Get REDCap Project project_id = redcap_project_dashboard["project_id"] @@ -367,9 +367,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - 
redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -517,9 +517,9 @@ def put(self, study_id: int): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From 745a84b7a1abb37851dbb2297d636c84955cea5a Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 13 Feb 2024 20:21:47 +0000 Subject: [PATCH 421/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 53410747..9f9c3a42 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -291,9 +291,9 @@ def post(self, study_id: int): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[ - str, Any - ] = connect_redcap_project_dashboard_data.to_dict() + connect_redcap_project_dashboard: Dict[str, Any] = ( + connect_redcap_project_dashboard_data.to_dict() + ) return connect_redcap_project_dashboard, 201 @@ -315,9 +315,9 @@ def get(self, study_id: int): model.StudyRedcapProjectDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[ - str, Any - ] = redcap_project_dashboard_connector_query.to_dict() + redcap_project_dashboard_connector: Dict[str, Any] = ( + redcap_project_dashboard_connector_query.to_dict() + ) return redcap_project_dashboard_connector, 
201 @@ -349,9 +349,9 @@ def get(self, study_id: int): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyRedcapProjectDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Get REDCap Project project_id = redcap_project_dashboard["project_id"] @@ -367,9 +367,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( + report["report_id"] + ) # Structure REDCap ETL Config redcap_etl_config = { @@ -517,9 +517,9 @@ def put(self, study_id: int): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + update_redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From 478cb120b7cf7f227fa07b0662c49766b9b84ed4 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 13 Feb 2024 12:36:12 -0800 Subject: [PATCH 422/505] =?UTF-8?q?=F0=9F=90=9B=20fix:=20minor=20fixes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 37 +++++++++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/app.py b/app.py index 6dca4472..db7a6ce9 100644 --- a/app.py +++ b/app.py @@ -105,20 +105,23 @@ def create_app(config_module=None, loglevel="INFO"): # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": True}}) - @app.cli.command("create-schema") - def create_schema(): - """Create the database schema.""" - engine = model.db.session.get_bind() - metadata = MetaData() 
- metadata.reflect(bind=engine) - table_names = [table.name for table in metadata.tables.values()] - if len(table_names) == 0: - with engine.begin(): - model.db.create_all() + # @app.cli.command("create-schema") + # def create_schema(): + # """Create the database schema.""" + # engine = model.db.session.get_bind() + # metadata = MetaData() + # metadata.reflect(bind=engine) + # table_names = [table.name for table in metadata.tables.values()] + # if len(table_names) == 0: + # with engine.begin(): + # model.db.create_all() @app.cli.command("destroy-schema") def destroy_schema(): """Create the database schema.""" + # If DB is Azure, Skip + if config.FAIRHUB_DATABASE_URL.find("azure") > -1: + return engine = model.db.session.get_bind() with engine.begin(): model.db.drop_all() @@ -126,6 +129,9 @@ def destroy_schema(): @app.cli.command("cycle-schema") def cycle_schema(): """Destroy then re-create the database schema.""" + # If DB is Azure, Skip + if config.FAIRHUB_DATABASE_URL.find("azure") > -1: + return engine = model.db.session.get_bind() with engine.begin(): model.db.drop_all() @@ -240,6 +246,17 @@ def on_after_request(resp): def validation_exception_handler(error): return error.args[0], 422 + with app.app_context(): + engine = model.db.session.get_bind() + metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + + # The alembic table is created by default, so we need to check for more than 1 table + if len(table_names) <= 1: + with engine.begin(): + model.db.create_all() + return app From 12d080a8c77a5984997c00cc6e2660eb9428f016 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 15 Feb 2024 11:01:15 -0800 Subject: [PATCH 423/505] =?UTF-8?q?=F0=9F=94=A8=20chore:=20update=20side?= =?UTF-8?q?=20scripts?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 16 +- dev-docker-compose.yaml | 53 +- poetry.lock | 1233 
+++++++++++++++++++++------------------ pyproject.toml | 7 +- 4 files changed, 688 insertions(+), 621 deletions(-) diff --git a/README.md b/README.md index dd7ec5a7..2d9f643b 100644 --- a/README.md +++ b/README.md @@ -85,23 +85,29 @@ Don't forget to start the database before running the api. See [Database](#datab ## Database -The api uses a postgres database. You can create a database locally using docker: +The api uses a postgres and redis database. You can create both of these locally via docker: ```bash -docker-compose -f ./db-docker-compose.yaml up -docker-compose -f ./db-docker-compose.yaml up -d # if you want the db to run in the background +docker-compose -f ./dev-docker-compose.yaml up +docker-compose -f ./dev-docker-compose.yaml up -d # if you want the db to run in the background ``` Close the database with: ```bash -docker-compose -f ./db-docker-compose.yaml down -v +docker-compose -f ./dev-docker-compose.yaml down -v ``` ## Running For developer mode: +```bash +poe dev +``` + +or + ```bash flask run --debug ``` @@ -118,5 +124,5 @@ This work is licensed under [MIT](https://opensource.org/licenses/mit). See [LICENSE](https://github.com/AI-READI/pyfairdatatools/blob/main/LICENSE) for more information. - + AI-READI logo diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 2879955f..6ed955b3 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -1,63 +1,30 @@ version: '3' services: - flask-api: - build: - context: . 
- dockerfile: Dockerfile - ports: - - 5000:5000 - # volumes: - # - ./apis:/app/apis - # - ./model:/app/model - # - ./core:/app/core - # - ./app.py:/app/ - # - ./config.py:/app/ - environment: - FLASK_APP: api.fairhub.io - FLASK_ENV: development - FLASK_DEBUG: 1 - FAIRHUB_DATABASE_URL: "${FAIRHUB_DATABASE_URL:-postgresql://postgres:postgres@database:5432/postgres}" - secret: "aaldkljla;jsdjklajlkkljdkljakjl;d;" - depends_on: - database: - condition: service_healthy - database: + postgres: image: postgres:latest + restart: always environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - PGUSER: postgres + POSTGRES_USER: admin + POSTGRES_PASSWORD: root + POSTGRES_DB: fairhub_local ports: - 5432:5432 - # restart: always - healthcheck: - test: pg_isready - interval: 10s - timeout: 5s - retries: 5 - # volumes: - # - ./postgres-data:/var/lib/postgresql/data - # - ./sql/init_timezones.sql:/docker-entrypoint-initdb.d/1-schema.sql + volumes: + - ./postgres-data:/var/lib/postgresql/data cache: image: redis:7.2-alpine - # restart: always + restart: always environment: CACHE_DB: fairhub CACHE_HOST: localhost CACHE_PORT: 6379 - CACHE_TYPE: RedisCache CACHE_URL: redis://127.0.0.1:6379 - CACHE_PREFIX: fairhub-io# + CACHE_KEY_PREFIX: $fairhub-io CACHE_TIMEOUT: 86400 - CACHE_PASSWORD: development ports: - '6379:6379' command: redis-server --save 20 1 --loglevel warning volumes: - - cache:/data - # volumes: - # cache: - # driver: local + - ./redis-data:/data diff --git a/poetry.lock b/poetry.lock index 377b92ed..c58710fc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -60,14 +60,14 @@ trio = ["trio (>=0.23)"] [[package]] name = "appnope" -version = "0.1.3" +version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - 
{file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, ] [[package]] @@ -232,22 +232,23 @@ files = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "babel" @@ -264,18 +265,6 @@ files = [ [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -category = 
"dev" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] - [[package]] name = "bcrypt" version = "4.1.2" @@ -319,53 +308,56 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.12.2" +version = "4.12.3" description = "Screen-scraping library" category = "dev" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, ] [package.dependencies] soupsieve = ">1.2" [package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "black" -version = "23.12.0" +version = "23.12.1" description = "The uncompromising code formatter." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67f19562d367468ab59bd6c36a72b2c84bc2f16b59788690e02bbcb140a77175"}, - {file = "black-23.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bbd75d9f28a7283b7426160ca21c5bd640ca7cd8ef6630b4754b6df9e2da8462"}, - {file = "black-23.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:593596f699ca2dcbbbdfa59fcda7d8ad6604370c10228223cd6cf6ce1ce7ed7e"}, - {file = "black-23.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:12d5f10cce8dc27202e9a252acd1c9a426c83f95496c959406c96b785a92bb7d"}, - {file = "black-23.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e73c5e3d37e5a3513d16b33305713237a234396ae56769b839d7c40759b8a41c"}, - {file = "black-23.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ba09cae1657c4f8a8c9ff6cfd4a6baaf915bb4ef7d03acffe6a2f6585fa1bd01"}, - {file = "black-23.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace64c1a349c162d6da3cef91e3b0e78c4fc596ffde9413efa0525456148873d"}, - {file = "black-23.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:72db37a2266b16d256b3ea88b9affcdd5c41a74db551ec3dd4609a59c17d25bf"}, - {file = "black-23.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fdf6f23c83078a6c8da2442f4d4eeb19c28ac2a6416da7671b72f0295c4a697b"}, - {file = "black-23.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39dda060b9b395a6b7bf9c5db28ac87b3c3f48d4fdff470fa8a94ab8271da47e"}, - {file = "black-23.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7231670266ca5191a76cb838185d9be59cfa4f5dd401b7c1c70b993c58f6b1b5"}, - {file = "black-23.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:193946e634e80bfb3aec41830f5d7431f8dd5b20d11d89be14b84a97c6b8bc75"}, - {file = "black-23.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcf91b01ddd91a2fed9a8006d7baa94ccefe7e518556470cf40213bd3d44bbbc"}, - 
{file = "black-23.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:996650a89fe5892714ea4ea87bc45e41a59a1e01675c42c433a35b490e5aa3f0"}, - {file = "black-23.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdbff34c487239a63d86db0c9385b27cdd68b1bfa4e706aa74bb94a435403672"}, - {file = "black-23.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:97af22278043a6a1272daca10a6f4d36c04dfa77e61cbaaf4482e08f3640e9f0"}, - {file = "black-23.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ead25c273adfad1095a8ad32afdb8304933efba56e3c1d31b0fee4143a1e424a"}, - {file = "black-23.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c71048345bdbced456cddf1622832276d98a710196b842407840ae8055ade6ee"}, - {file = "black-23.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a832b6e00eef2c13b3239d514ea3b7d5cc3eaa03d0474eedcbbda59441ba5d"}, - {file = "black-23.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:6a82a711d13e61840fb11a6dfecc7287f2424f1ca34765e70c909a35ffa7fb95"}, - {file = "black-23.12.0-py3-none-any.whl", hash = "sha256:a7c07db8200b5315dc07e331dda4d889a56f6bf4db6a9c2a526fa3166a81614f"}, - {file = "black-23.12.0.tar.gz", hash = "sha256:330a327b422aca0634ecd115985c1c7fd7bdb5b5a2ef8aa9888a82e2ebe9437a"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = 
"black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -428,14 +420,14 @@ files = [ [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -632,14 +624,14 @@ files = [ [[package]] name = "comm" -version = "0.2.0" +version = "0.2.1" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "comm-0.2.0-py3-none-any.whl", hash = "sha256:2da8d9ebb8dd7bfc247adaff99f24dce705638a8042b85cb995066793e391001"}, - {file = "comm-0.2.0.tar.gz", hash = "sha256:a517ea2ca28931c7007a7a99c562a0fa5883cfb48963140cf642c41c948498be"}, + {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, + {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, ] [package.dependencies] @@ -650,64 +642,64 @@ test = ["pytest"] [[package]] name = "coverage" -version = "7.3.4" +version = "7.4.1" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aff2bd3d585969cc4486bfc69655e862028b689404563e6b549e6a8244f226df"}, - {file = "coverage-7.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4353923f38d752ecfbd3f1f20bf7a3546993ae5ecd7c07fd2f25d40b4e54571"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea473c37872f0159294f7073f3fa72f68b03a129799f3533b2bb44d5e9fa4f82"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5214362abf26e254d749fc0c18af4c57b532a4bfde1a057565616dd3b8d7cc94"}, - {file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f99b7d3f7a7adfa3d11e3a48d1a91bb65739555dd6a0d3fa68aa5852d962e5b1"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:74397a1263275bea9d736572d4cf338efaade2de9ff759f9c26bcdceb383bb49"}, - {file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f154bd866318185ef5865ace5be3ac047b6d1cc0aeecf53bf83fe846f4384d5d"}, - {file = 
"coverage-7.3.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e0d84099ea7cba9ff467f9c6f747e3fc3906e2aadac1ce7b41add72e8d0a3712"}, - {file = "coverage-7.3.4-cp310-cp310-win32.whl", hash = "sha256:3f477fb8a56e0c603587b8278d9dbd32e54bcc2922d62405f65574bd76eba78a"}, - {file = "coverage-7.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:c75738ce13d257efbb6633a049fb2ed8e87e2e6c2e906c52d1093a4d08d67c6b"}, - {file = "coverage-7.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:997aa14b3e014339d8101b9886063c5d06238848905d9ad6c6eabe533440a9a7"}, - {file = "coverage-7.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a9c5bc5db3eb4cd55ecb8397d8e9b70247904f8eca718cc53c12dcc98e59fc8"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27ee94f088397d1feea3cb524e4313ff0410ead7d968029ecc4bc5a7e1d34fbf"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ce03e25e18dd9bf44723e83bc202114817f3367789052dc9e5b5c79f40cf59d"}, - {file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85072e99474d894e5df582faec04abe137b28972d5e466999bc64fc37f564a03"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a877810ef918d0d345b783fc569608804f3ed2507bf32f14f652e4eaf5d8f8d0"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ac17b94ab4ca66cf803f2b22d47e392f0977f9da838bf71d1f0db6c32893cb9"}, - {file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:36d75ef2acab74dc948d0b537ef021306796da551e8ac8b467810911000af66a"}, - {file = "coverage-7.3.4-cp311-cp311-win32.whl", hash = "sha256:47ee56c2cd445ea35a8cc3ad5c8134cb9bece3a5cb50bb8265514208d0a65928"}, - {file = "coverage-7.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:11ab62d0ce5d9324915726f611f511a761efcca970bd49d876cf831b4de65be5"}, - 
{file = "coverage-7.3.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:33e63c578f4acce1b6cd292a66bc30164495010f1091d4b7529d014845cd9bee"}, - {file = "coverage-7.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:782693b817218169bfeb9b9ba7f4a9f242764e180ac9589b45112571f32a0ba6"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4277ddaad9293454da19121c59f2d850f16bcb27f71f89a5c4836906eb35ef"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d892a19ae24b9801771a5a989fb3e850bd1ad2e2b6e83e949c65e8f37bc67a1"}, - {file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3024ec1b3a221bd10b5d87337d0373c2bcaf7afd86d42081afe39b3e1820323b"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1c3e9d2bbd6f3f79cfecd6f20854f4dc0c6e0ec317df2b265266d0dc06535f1"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e91029d7f151d8bf5ab7d8bfe2c3dbefd239759d642b211a677bc0709c9fdb96"}, - {file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6879fe41c60080aa4bb59703a526c54e0412b77e649a0d06a61782ecf0853ee1"}, - {file = "coverage-7.3.4-cp312-cp312-win32.whl", hash = "sha256:fd2f8a641f8f193968afdc8fd1697e602e199931012b574194052d132a79be13"}, - {file = "coverage-7.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:d1d0ce6c6947a3a4aa5479bebceff2c807b9f3b529b637e2b33dea4468d75fc7"}, - {file = "coverage-7.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36797b3625d1da885b369bdaaa3b0d9fb8865caed3c2b8230afaa6005434aa2f"}, - {file = "coverage-7.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfed0ec4b419fbc807dec417c401499ea869436910e1ca524cfb4f81cf3f60e7"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f97ff5a9fc2ca47f3383482858dd2cb8ddbf7514427eecf5aa5f7992d0571429"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:607b6c6b35aa49defaebf4526729bd5238bc36fe3ef1a417d9839e1d96ee1e4c"}, - {file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8e258dcc335055ab59fe79f1dec217d9fb0cdace103d6b5c6df6b75915e7959"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a02ac7c51819702b384fea5ee033a7c202f732a2a2f1fe6c41e3d4019828c8d3"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b710869a15b8caf02e31d16487a931dbe78335462a122c8603bb9bd401ff6fb2"}, - {file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6a23ae9348a7a92e7f750f9b7e828448e428e99c24616dec93a0720342f241d"}, - {file = "coverage-7.3.4-cp38-cp38-win32.whl", hash = "sha256:758ebaf74578b73f727acc4e8ab4b16ab6f22a5ffd7dd254e5946aba42a4ce76"}, - {file = "coverage-7.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:309ed6a559bc942b7cc721f2976326efbfe81fc2b8f601c722bff927328507dc"}, - {file = "coverage-7.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aefbb29dc56317a4fcb2f3857d5bce9b881038ed7e5aa5d3bcab25bd23f57328"}, - {file = "coverage-7.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:183c16173a70caf92e2dfcfe7c7a576de6fa9edc4119b8e13f91db7ca33a7923"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a4184dcbe4f98d86470273e758f1d24191ca095412e4335ff27b417291f5964"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93698ac0995516ccdca55342599a1463ed2e2d8942316da31686d4d614597ef9"}, - {file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fb220b3596358a86361139edce40d97da7458412d412e1e10c8e1970ee8c09ab"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5b14abde6f8d969e6b9dd8c7a013d9a2b52af1235fe7bebef25ad5c8f47fa18"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:610afaf929dc0e09a5eef6981edb6a57a46b7eceff151947b836d869d6d567c1"}, - {file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed790728fb71e6b8247bd28e77e99d0c276dff952389b5388169b8ca7b1c28"}, - {file = "coverage-7.3.4-cp39-cp39-win32.whl", hash = "sha256:c15fdfb141fcf6a900e68bfa35689e1256a670db32b96e7a931cab4a0e1600e5"}, - {file = "coverage-7.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:38d0b307c4d99a7aca4e00cad4311b7c51b7ac38fb7dea2abe0d182dd4008e05"}, - {file = "coverage-7.3.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b1e0f25ae99cf247abfb3f0fac7ae25739e4cd96bf1afa3537827c576b4847e5"}, - {file = "coverage-7.3.4.tar.gz", hash = "sha256:020d56d2da5bc22a0e00a5b0d54597ee91ad72446fa4cf1b97c35022f6b6dbf0"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = 
"coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = 
"coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -737,76 +729,74 @@ requests = ">=2.28,<3.0" [[package]] name = "cryptography" -version = "41.0.7" +version = "3.4.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, - {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, - {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, - {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, - {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, - {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, - {file = 
"cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, - {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, - {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, - {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, - {file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, + {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, + {file = 
"cryptography-3.4.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b"}, + {file = "cryptography-3.4.8-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c4129fc3fdc0fa8e40861b5ac0c673315b3c902bbdc05fc176764815b43dd1d"}, + {file = "cryptography-3.4.8-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:695104a9223a7239d155d7627ad912953b540929ef97ae0c34c7b8bf30857e89"}, + {file = "cryptography-3.4.8-cp36-abi3-win32.whl", hash = "sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7"}, + {file = "cryptography-3.4.8-cp36-abi3-win_amd64.whl", hash = "sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af"}, + {file = "cryptography-3.4.8-pp36-pypy36_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06"}, + {file = 
"cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9"}, + {file = "cryptography-3.4.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e"}, + {file = "cryptography-3.4.8.tar.gz", hash = "sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c"}, ] [package.dependencies] cffi = ">=1.12" [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -nox = ["nox"] -pep8test = ["black", "check-sdist", "mypy", "ruff"] -sdist = ["build"] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["doc8", "pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] -test-randomorder = ["pytest-randomly"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "debugpy" -version = "1.8.0" +version = "1.8.1" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = 
"sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, - {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, - {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, - {file = "debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, - {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, - {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, - {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, - {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, - {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, - {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, - {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, - {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, - {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, - {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, - {file = 
"debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, - {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, - {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, - {file = "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, + {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, + {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, + {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, + {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, + {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, + {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, + {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, + {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, + {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, + {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, + {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, + {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, + {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, + {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, + {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, + {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, + {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, + {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, + {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, + {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, + {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, + {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, ] [[package]] @@ -847,38 +837,40 @@ files = [ [[package]] name = "dill" -version = "0.3.7" +version = "0.3.8" description = "serialize all of Python" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = 
"dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "dnspython" -version = "2.4.2" +version = "2.5.0" description = "DNS toolkit" category = "main" optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.8" files = [ - {file = "dnspython-2.4.2-py3-none-any.whl", hash = "sha256:57c6fbaaeaaf39c891292012060beb141791735dbb4004798328fc2c467402d8"}, - {file = "dnspython-2.4.2.tar.gz", hash = "sha256:8dcfae8c7460a2f84b4072e26f1c9f4101ca20c071649cb7c34e8b6a93d58984"}, + {file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"}, + {file = "dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"}, ] [package.extras] -dnssec = ["cryptography (>=2.6,<42.0)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.24.1)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"] +dnssec = ["cryptography (>=41)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"] doq = ["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1,<4.0)"] -trio = ["trio (>=0.14,<0.23)"] -wmi = ["wmi (>=1.5.1,<2.0.0)"] +idna = ["idna (>=2.1)"] +trio = ["trio (>=0.14)"] +wmi = ["wmi (>=1.5.1)"] [[package]] name = "docopt" @@ -954,14 +946,14 @@ python-dateutil = ">=2.4" [[package]] name = "fastjsonschema" 
-version = "2.19.0" +version = "2.19.1" description = "Fastest Python implementation of JSON schema" category = "dev" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.19.0-py3-none-any.whl", hash = "sha256:b9fd1a2dd6971dbc7fee280a95bd199ae0dd9ce22beb91cc75e9c1c528a5170e"}, - {file = "fastjsonschema-2.19.0.tar.gz", hash = "sha256:e25df6647e1bc4a26070b700897b07b542ec898dd4f1f6ea013e7f6a88417225"}, + {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, + {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, ] [package.extras] @@ -1244,6 +1236,65 @@ cryptography = "*" typing-extensions = "*" urllib3 = "*" +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.3" +description = "A minimal low-level HTTP client." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] +trio = ["trio (>=0.22.0,<0.24.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." 
+category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = ">=1.0.0,<2.0.0" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + [[package]] name = "idna" version = "3.6" @@ -1258,14 +1309,14 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.0.0" +version = "7.0.1" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, - {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, ] [package.dependencies] @@ -1306,14 +1357,14 @@ files = [ [[package]] name = "ipykernel" -version = "6.27.1" +version = "6.29.2" description = "IPython Kernel for Jupyter" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"}, - {file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"}, + {file = "ipykernel-6.29.2-py3-none-any.whl", hash = 
"sha256:50384f5c577a260a1d53f1f59a828c7266d321c9b7d00d345693783f66616055"}, + {file = "ipykernel-6.29.2.tar.gz", hash = "sha256:3bade28004e3ff624ed57974948116670604ac5f676d12339693f3142176d3f0"}, ] [package.dependencies] @@ -1327,7 +1378,7 @@ matplotlib-inline = ">=0.1" nest-asyncio = "*" packaging = "*" psutil = "*" -pyzmq = ">=20" +pyzmq = ">=24" tornado = ">=6.1" traitlets = ">=5.4.0" @@ -1336,65 +1387,63 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (==0.23.4)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" -version = "8.12.3" +version = "8.21.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" files = [ - {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, - {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, + {file = "ipython-8.21.0-py3-none-any.whl", hash = "sha256:1050a3ab8473488d7eee163796b02e511d0735cf43a04ba2a8348bd0f2eaf8a5"}, + {file = "ipython-8.21.0.tar.gz", hash = "sha256:48fbc236fbe0e138b88773fa0437751f14c3645fb483f1d4c5dee58b37e5ce73"}, ] [package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != 
\"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath", "trio"] 
[[package]] name = "ipywidgets" -version = "8.1.1" +version = "8.1.2" description = "Jupyter interactive widgets" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "ipywidgets-8.1.1-py3-none-any.whl", hash = "sha256:2b88d728656aea3bbfd05d32c747cfd0078f9d7e159cf982433b58ad717eed7f"}, - {file = "ipywidgets-8.1.1.tar.gz", hash = "sha256:40211efb556adec6fa450ccc2a77d59ca44a060f4f9f136833df59c9f538e6e8"}, + {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, + {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.9,<3.1.0" +jupyterlab-widgets = ">=3.0.10,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.9,<4.1.0" +widgetsnbextension = ">=4.0.10,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] @@ -1463,14 +1512,14 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -1508,14 +1557,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.20.0" +version = "4.21.1" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.20.0-py3-none-any.whl", hash = "sha256:ed6231f0429ecf966f5bc8dfef245998220549cbbcf140f913b7464c52c3b6b3"}, - {file = "jsonschema-4.20.0.tar.gz", hash = "sha256:4f614fd46d8d61258610998997743ec5492a648b33cf478c1ddc23ed4598a5fa"}, + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, ] [package.dependencies] @@ -1538,14 +1587,14 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.11.2" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.11.2-py3-none-any.whl", hash = "sha256:e74ba7c0a65e8cb49dc26837d6cfe576557084a8b423ed16a420984228104f93"}, - {file = "jsonschema_specifications-2023.11.2.tar.gz", hash = "sha256:9472fc4fea474cd74bea4a2b190daeccb5a9e4db2ea80efcf7a1b582fc9a81b8"}, 
+ {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] @@ -1622,14 +1671,14 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.5.1" +version = "5.7.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.5.1-py3-none-any.whl", hash = "sha256:220dfb00c45f0d780ce132bb7976b58263f81a3ada6e90a9b6823785a424f739"}, - {file = "jupyter_core-5.5.1.tar.gz", hash = "sha256:1553311a97ccd12936037f36b9ab4d6ae8ceea6ad2d5c90d94a909e752178e40"}, + {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, + {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, ] [package.dependencies] @@ -1669,14 +1718,14 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.1" +version = "2.2.2" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.1.tar.gz", hash = "sha256:b17fab6d70fe83c8896b0cff59237640038247c196056b43684a0902b6a9e0fb"}, - {file = "jupyter_lsp-2.2.1-py3-none-any.whl", hash = "sha256:17a689910c5e4ae5e7d334b02f31d08ffbe98108f6f658fb05e4304b4345368b"}, + {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, + {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, ] [package.dependencies] @@ 
-1684,14 +1733,14 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.1" +version = "2.12.5" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server-2.12.1-py3-none-any.whl", hash = "sha256:fd030dd7be1ca572e4598203f718df6630c12bd28a599d7f1791c4d7938e1010"}, - {file = "jupyter_server-2.12.1.tar.gz", hash = "sha256:dc77b7dcc5fc0547acba2b2844f01798008667201eea27c6319ff9257d700a6d"}, + {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, + {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, ] [package.dependencies] @@ -1721,14 +1770,14 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-sc [[package]] name = "jupyter-server-terminals" -version = "0.5.0" +version = "0.5.2" description = "A Jupyter Server Extension Providing Terminals." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.5.0-py3-none-any.whl", hash = "sha256:2fc0692c883bfd891f4fba0c4b4a684a37234b0ba472f2e97ed0a3888f46e1e4"}, - {file = "jupyter_server_terminals-0.5.0.tar.gz", hash = "sha256:ebcd68c9afbf98a480a533e6f3266354336e645536953b7abcc7bdeebc0154a3"}, + {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, + {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, ] [package.dependencies] @@ -1741,18 +1790,19 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.0.9" +version = "4.1.1" description = "JupyterLab computational environment" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.9-py3-none-any.whl", hash = "sha256:9f6f8e36d543fdbcc3df961a1d6a3f524b4a4001be0327a398f68fa4e534107c"}, - {file = "jupyterlab-4.0.9.tar.gz", hash = "sha256:9ebada41d52651f623c0c9f069ddb8a21d6848e4c887d8e5ddc0613166ed5c0b"}, + {file = "jupyterlab-4.1.1-py3-none-any.whl", hash = "sha256:fa3e8c18b804eac04e51ceebd9dd3dd396e08106816f0d09cc426799d7087632"}, + {file = "jupyterlab-4.1.1.tar.gz", hash = "sha256:8acc9f561729d8f32c14c294c397917cddfeeb13a5d46f811979b71b4911a9fd"}, ] [package.dependencies] async-lru = ">=1.0.0" +httpx = ">=0.25.0" ipykernel = "*" jinja2 = ">=3.0.3" jupyter-core = "*" @@ -1766,9 +1816,9 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.10.1)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.1.4)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.0.1)", "ipython 
(==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.2.0)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.2.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.1)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post6)", "matplotlib (==3.8.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.0)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] [[package]] @@ -1785,14 +1835,14 @@ files = [ [[package]] name = "jupyterlab-server" -version = "2.25.2" +version = "2.25.3" description = "A set of server components for JupyterLab and JupyterLab like applications." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab_server-2.25.2-py3-none-any.whl", hash = "sha256:5b1798c9cc6a44f65c757de9f97fc06fc3d42535afbf47d2ace5e964ab447aaf"}, - {file = "jupyterlab_server-2.25.2.tar.gz", hash = "sha256:bd0ec7a99ebcedc8bcff939ef86e52c378e44c2707e053fcd81d046ce979ee63"}, + {file = "jupyterlab_server-2.25.3-py3-none-any.whl", hash = "sha256:c48862519fded9b418c71645d85a49b2f0ec50d032ba8316738e9276046088c1"}, + {file = "jupyterlab_server-2.25.3.tar.gz", hash = "sha256:846f125a8a19656611df5b03e5912c8393cea6900859baa64fa515eb64a8dc40"}, ] [package.dependencies] @@ -1811,14 +1861,14 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v [[package]] name = "jupyterlab-widgets" -version = "3.0.9" +version = "3.0.10" description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.9-py3-none-any.whl", hash = "sha256:3cf5bdf5b897bf3bccf1c11873aa4afd776d7430200f765e0686bd352487b58d"}, - {file = "jupyterlab_widgets-3.0.9.tar.gz", hash = "sha256:6005a4e974c7beee84060fdfba341a3218495046de8ae3ec64888e5fe19fdb4c"}, + {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = "sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, + {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, ] [[package]] @@ -1870,14 +1920,14 @@ files = [ [[package]] name = "mako" -version = "1.3.0" +version = "1.3.2" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.0-py3-none-any.whl", hash = "sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9"}, - {file = "Mako-1.3.0.tar.gz", hash = "sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b"}, + {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, + {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, ] [package.dependencies] @@ -1905,62 +1955,72 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = 
"MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = 
"MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = 
"MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = 
"MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -2138,14 +2198,14 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= [[package]] name = "nbconvert" -version = "7.13.1" +version = "7.16.0" description = "Converting Jupyter Notebooks" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.13.1-py3-none-any.whl", hash = "sha256:3c50eb2d326478cc90b8759cf2ab9dde3d892c6537cd6a5bc0991db8ef734bcc"}, - {file = "nbconvert-7.13.1.tar.gz", hash = "sha256:2dc8267dbdfeedce2dcd34c9e3f1b51af18f43cb105549d1c5a18189ec23ba85"}, + {file = "nbconvert-7.16.0-py3-none-any.whl", hash = "sha256:ad3dc865ea6e2768d31b7eb6c7ab3be014927216a5ece3ef276748dd809054c7"}, + {file = "nbconvert-7.16.0.tar.gz", hash = "sha256:813e6553796362489ae572e39ba1bff978536192fb518e10826b0e8cadf03ec8"}, ] [package.dependencies] @@ -2198,31 +2258,31 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" -version = "1.5.8" +version = "1.6.0" description = "Patch asyncio to allow nested event loops" category = "dev" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, - {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, ] [[package]] name = "notebook" -version = "7.0.6" +version = "7.1.0" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" category = "dev" 
optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.6-py3-none-any.whl", hash = "sha256:0fe8f67102fea3744fedf652e4c15339390902ca70c5a31c4f547fa23da697cc"}, - {file = "notebook-7.0.6.tar.gz", hash = "sha256:ec6113b06529019f7f287819af06c97a2baf7a95ac21a8f6e32192898e9f9a58"}, + {file = "notebook-7.1.0-py3-none-any.whl", hash = "sha256:a8fa4ccb5e5fe220f29d9900337efd7752bc6f2efe004d6f320db01f7743adc9"}, + {file = "notebook-7.1.0.tar.gz", hash = "sha256:99caf01ff166b1cc86355c9b37c1ba9bf566c1d7fc4ab57bb6f8f24e36c4260e"}, ] [package.dependencies] jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.0.2,<5" +jupyterlab = ">=4.1.1,<4.2" jupyterlab-server = ">=2.22.1,<3" notebook-shim = ">=0.2,<0.3" tornado = ">=6.2.0" @@ -2234,14 +2294,14 @@ test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4 [[package]] name = "notebook-shim" -version = "0.2.3" +version = "0.2.4" description = "A shim layer for notebook traits and config" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "notebook_shim-0.2.3-py3-none-any.whl", hash = "sha256:a83496a43341c1674b093bfcebf0fe8e74cbe7eda5fd2bbc56f8e39e1486c0c7"}, - {file = "notebook_shim-0.2.3.tar.gz", hash = "sha256:f69388ac283ae008cd506dda10d0288b09a017d822d5e8c7129a152cbd3ce7e9"}, + {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, + {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, ] [package.dependencies] @@ -2298,14 +2358,14 @@ files = [ [[package]] name = "overrides" -version = "7.4.0" +version = "7.7.0" description = "A decorator to automatically detect mismatch when overriding a method." 
category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, - {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, ] [[package]] @@ -2395,16 +2455,38 @@ xml = ["lxml (>=4.9.2)"] [[package]] name = "pandocfilters" -version = "1.5.0" +version = "1.5.1" description = "Utilities for writing pandoc filters in python" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, - {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, ] +[[package]] +name = "paramiko" +version = "3.4.0" +description = "SSH2 protocol library" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, + {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, +] + +[package.dependencies] +bcrypt = ">=3.2" +cryptography = ">=3.3" +pynacl = ">=1.5" + +[package.extras] +all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] 
+gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +invoke = ["invoke (>=2.0)"] + [[package]] name = "parso" version = "0.8.3" @@ -2460,44 +2542,32 @@ files = [ [package.dependencies] ptyprocess = ">=0.5" -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" 
description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -2525,14 +2595,14 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "prometheus-client" -version = "0.19.0" +version = "0.20.0" description = "Python client for the Prometheus monitoring system." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, - {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, ] [package.extras] @@ -2555,28 +2625,28 @@ wcwidth = "*" [[package]] name = "psutil" -version = "5.9.7" +version = "5.9.8" description = "Cross-platform lib for process and system monitoring in Python." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = "psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = 
"sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, ] [package.extras] @@ -2594,6 +2664,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -2808,16 +2880,43 @@ 
pyyaml = "*" [package.extras] extra = ["pygments (>=2.12)"] +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + [[package]] name = 
"pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -2852,18 +2951,18 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-describe" -version = "2.1.0" +version = "2.2.0" description = "Describe-style plugin for pytest" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-describe-2.1.0.tar.gz", hash = "sha256:0630c95ac4942ab8dcd8e766236f86436b4984896db0c059fc234fef66fe9732"}, - {file = "pytest_describe-2.1.0-py3-none-any.whl", hash = "sha256:3ea587839363a91ea24e35e442dae46b56bd91d670e63b755e002b0adfc7a7b2"}, + {file = "pytest-describe-2.2.0.tar.gz", hash = "sha256:39bb05eb90f2497d9ca342ef9a0b7fa5bada7e58505aec33f66d661d631955b7"}, + {file = "pytest_describe-2.2.0-py3-none-any.whl", hash = "sha256:bd9e2c73acb4b9522a8400823d98f5b6a081667d3bfd7243a8598336896b544d"}, ] [package.dependencies] -pytest = ">=4.6,<8" +pytest = ">=4.6,<9" [[package]] name = "pytest-expecter" @@ -2908,14 +3007,14 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "1.0.0" +version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = 
"sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, ] [package.extras] @@ -2935,14 +3034,14 @@ files = [ [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -2998,7 +3097,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3006,15 +3104,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3031,7 +3122,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3039,7 +3129,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3231,14 +3320,14 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" [[package]] name = "referencing" -version = "0.32.0" +version = "0.33.0" description = "JSON Referencing + Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.32.0-py3-none-any.whl", hash = "sha256:bdcd3efb936f82ff86f993093f6da7435c7de69a3b3a5a06678a6050184bee99"}, - {file = "referencing-0.32.0.tar.gz", hash = "sha256:689e64fe121843dcfd57b71933318ef1f91188ffb45367332700a86ac8fd6161"}, + {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, + {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, ] [package.dependencies] @@ -3296,111 +3385,111 @@ files = [ [[package]] name = "rpds-py" -version = "0.15.2" +version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = 
"sha256:337a8653fb11d2fbe7157c961cc78cb3c161d98cf44410ace9a3dc2db4fad882"}, - {file = "rpds_py-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:813a65f95bfcb7c8f2a70dd6add9b51e9accc3bdb3e03d0ff7a9e6a2d3e174bf"}, - {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:082e0e55d73690ffb4da4352d1b5bbe1b5c6034eb9dc8c91aa2a3ee15f70d3e2"}, - {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5595c80dd03d7e6c6afb73f3594bf3379a7d79fa57164b591d012d4b71d6ac4c"}, - {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb10bb720348fe1647a94eb605accb9ef6a9b1875d8845f9e763d9d71a706387"}, - {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53304cc14b1d94487d70086e1cb0cb4c29ec6da994d58ae84a4d7e78c6a6d04d"}, - {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d64a657de7aae8db2da60dc0c9e4638a0c3893b4d60101fd564a3362b2bfeb34"}, - {file = "rpds_py-0.15.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ee40206d1d6e95eaa2b7b919195e3689a5cf6ded730632de7f187f35a1b6052c"}, - {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1607cda6129f815493a3c184492acb5ae4aa6ed61d3a1b3663aa9824ed26f7ac"}, - {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3e6e2e502c4043c52a99316d89dc49f416acda5b0c6886e0dd8ea7bb35859e8"}, - {file = "rpds_py-0.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:044f6f46d62444800402851afa3c3ae50141f12013060c1a3a0677e013310d6d"}, - {file = "rpds_py-0.15.2-cp310-none-win32.whl", hash = "sha256:c827a931c6b57f50f1bb5de400dcfb00bad8117e3753e80b96adb72d9d811514"}, - {file = "rpds_py-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3bbc89ce2a219662ea142f0abcf8d43f04a41d5b1880be17a794c39f0d609cb0"}, - {file = 
"rpds_py-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:1fd0f0b1ccd7d537b858a56355a250108df692102e08aa2036e1a094fd78b2dc"}, - {file = "rpds_py-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b414ef79f1f06fb90b5165db8aef77512c1a5e3ed1b4807da8476b7e2c853283"}, - {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c31272c674f725dfe0f343d73b0abe8c878c646967ec1c6106122faae1efc15b"}, - {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6945c2d61c42bb7e818677f43638675b8c1c43e858b67a96df3eb2426a86c9d"}, - {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02744236ac1895d7be837878e707a5c35fb8edc5137602f253b63623d7ad5c8c"}, - {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2181e86d4e1cdf49a7320cb72a36c45efcb7670d0a88f09fd2d3a7967c0540fd"}, - {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a8ff8e809da81363bffca2b965cb6e4bf6056b495fc3f078467d1f8266fe27f"}, - {file = "rpds_py-0.15.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97532802f14d383f37d603a56e226909f825a83ff298dc1b6697de00d2243999"}, - {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:13716e53627ad97babf72ac9e01cf9a7d4af2f75dd5ed7b323a7a9520e948282"}, - {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2f1f295a5c28cfa74a7d48c95acc1c8a7acd49d7d9072040d4b694fe11cd7166"}, - {file = "rpds_py-0.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8ec464f20fe803ae00419bd1610934e3bda963aeba1e6181dfc9033dc7e8940c"}, - {file = "rpds_py-0.15.2-cp311-none-win32.whl", hash = "sha256:b61d5096e75fd71018b25da50b82dd70ec39b5e15bb2134daf7eb7bbbc103644"}, - {file = "rpds_py-0.15.2-cp311-none-win_amd64.whl", hash = 
"sha256:9d41ebb471a6f064c0d1c873c4f7dded733d16ca5db7d551fb04ff3805d87802"}, - {file = "rpds_py-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:13ff62d3561a23c17341b4afc78e8fcfd799ab67c0b1ca32091d71383a98ba4b"}, - {file = "rpds_py-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b70b45a40ad0798b69748b34d508259ef2bdc84fb2aad4048bc7c9cafb68ddb3"}, - {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4ecbba7efd82bd2a4bb88aab7f984eb5470991c1347bdd1f35fb34ea28dba6e"}, - {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9d38494a8d21c246c535b41ecdb2d562c4b933cf3d68de03e8bc43a0d41be652"}, - {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13152dfe7d7c27c40df8b99ac6aab12b978b546716e99f67e8a67a1d441acbc3"}, - {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:164fcee32f15d04d61568c9cb0d919e37ff3195919cd604039ff3053ada0461b"}, - {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a5122b17a4faf5d7a6d91fa67b479736c0cacc7afe791ddebb7163a8550b799"}, - {file = "rpds_py-0.15.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:46b4f3d47d1033db569173be62365fbf7808c2bd3fb742314d251f130d90d44c"}, - {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c61e42b4ceb9759727045765e87d51c1bb9f89987aca1fcc8a040232138cad1c"}, - {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d2aa3ca9552f83b0b4fa6ca8c6ce08da6580f37e3e0ab7afac73a1cfdc230c0e"}, - {file = "rpds_py-0.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec19e823b4ccd87bd69e990879acbce9e961fc7aebe150156b8f4418d4b27b7f"}, - {file = "rpds_py-0.15.2-cp312-none-win32.whl", hash = "sha256:afeabb382c1256a7477b739820bce7fe782bb807d82927102cee73e79b41b38b"}, - {file = 
"rpds_py-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:422b0901878a31ef167435c5ad46560362891816a76cc0d150683f3868a6f0d1"}, - {file = "rpds_py-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:baf744e5f9d5ee6531deea443be78b36ed1cd36c65a0b95ea4e8d69fa0102268"}, - {file = "rpds_py-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e072f5da38d6428ba1fc1115d3cc0dae895df671cb04c70c019985e8c7606be"}, - {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f138f550b83554f5b344d6be35d3ed59348510edc3cb96f75309db6e9bfe8210"}, - {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2a4cd924d0e2f4b1a68034abe4cadc73d69ad5f4cf02db6481c0d4d749f548f"}, - {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5eb05b654a41e0f81ab27a7c3e88b6590425eb3e934e1d533ecec5dc88a6ffff"}, - {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ee066a64f0d2ba45391cac15b3a70dcb549e968a117bd0500634754cfe0e5fc"}, - {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51a899792ee2c696072791e56b2020caff58b275abecbc9ae0cb71af0645c95"}, - {file = "rpds_py-0.15.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac2ac84a4950d627d84b61f082eba61314373cfab4b3c264b62efab02ababe83"}, - {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:62b292fff4739c6be89e6a0240c02bda5a9066a339d90ab191cf66e9fdbdc193"}, - {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:98ee201a52a7f65608e5494518932e1473fd43535f12cade0a1b4ab32737fe28"}, - {file = "rpds_py-0.15.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3d40fb3ca22e3d40f494d577441b263026a3bd8c97ae6ce89b2d3c4b39ac9581"}, - {file = "rpds_py-0.15.2-cp38-none-win32.whl", hash = "sha256:30479a9f1fce47df56b07460b520f49fa2115ec2926d3b1303c85c81f8401ed1"}, - {file = 
"rpds_py-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:2df3d07a16a3bef0917b28cd564778fbb31f3ffa5b5e33584470e2d1b0f248f0"}, - {file = "rpds_py-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:56b51ba29a18e5f5810224bcf00747ad931c0716e3c09a76b4a1edd3d4aba71f"}, - {file = "rpds_py-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c11bc5814554b018f6c5d6ae0969e43766f81e995000b53a5d8c8057055e886"}, - {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2faa97212b0dc465afeedf49045cdd077f97be1188285e646a9f689cb5dfff9e"}, - {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:86c01299942b0f4b5b5f28c8701689181ad2eab852e65417172dbdd6c5b3ccc8"}, - {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd7d3608589072f63078b4063a6c536af832e76b0b3885f1bfe9e892abe6c207"}, - {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:938518a11780b39998179d07f31a4a468888123f9b00463842cd40f98191f4d3"}, - {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dccc623725d0b298f557d869a68496a2fd2a9e9c41107f234fa5f7a37d278ac"}, - {file = "rpds_py-0.15.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d46ee458452727a147d7897bb33886981ae1235775e05decae5d5d07f537695a"}, - {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d9d7ebcd11ea76ba0feaae98485cd8e31467c3d7985210fab46983278214736b"}, - {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8a5f574b92b3ee7d254e56d56e37ec0e1416acb1ae357c4956d76a1788dc58fb"}, - {file = "rpds_py-0.15.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3db0c998c92b909d7c90b66c965590d4f3cd86157176a6cf14aa1f867b77b889"}, - {file = "rpds_py-0.15.2-cp39-none-win32.whl", hash = "sha256:bbc7421cbd28b4316d1d017db338039a7943f945c6f2bb15e1439b14b5682d28"}, - {file = 
"rpds_py-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:1c24e30d720c0009b6fb2e1905b025da56103c70a8b31b99138e4ed1c2a6c5b0"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e6fcd0a0f62f2997107f758bb372397b8d5fd5f39cc6dcb86f7cb98a2172d6c"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d800a8e2ac62db1b9ea5d6d1724f1a93c53907ca061de4d05ed94e8dfa79050c"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e09d017e3f4d9bd7d17a30d3f59e4d6d9ba2d2ced280eec2425e84112cf623f"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b88c3ab98556bc351b36d6208a6089de8c8db14a7f6e1f57f82a334bd2c18f0b"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f333bfe782a2d05a67cfaa0cc9cd68b36b39ee6acfe099f980541ed973a7093"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b629db53fe17e6ce478a969d30bd1d0e8b53238c46e3a9c9db39e8b65a9ef973"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485fbdd23becb822804ed05622907ee5c8e8a5f43f6f43894a45f463b2217045"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:893e38d0f4319dfa70c0f36381a37cc418985c87b11d9784365b1fff4fa6973b"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8ffdeb7dbd0160d4e391e1f857477e4762d00aa2199c294eb95dfb9451aa1d9f"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:fc33267d58dfbb2361baed52668c5d8c15d24bc0372cecbb79fed77339b55e0d"}, - {file = "rpds_py-0.15.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2e7e5633577b3bd56bf3af2ef6ae3778bbafb83743989d57f0e7edbf6c0980e4"}, - {file = 
"rpds_py-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8b9650f92251fdef843e74fc252cdfd6e3c700157ad686eeb0c6d7fdb2d11652"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:07a2e1d78d382f7181789713cdf0c16edbad4fe14fe1d115526cb6f0eef0daa3"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f9c5875515820633bd7709a25c3e60c1ea9ad1c5d4030ce8a8c203309c36fd"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:580182fa5b269c2981e9ce9764367cb4edc81982ce289208d4607c203f44ffde"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa1e626c524d2c7972c0f3a8a575d654a3a9c008370dc2a97e46abd0eaa749b9"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae9d83a81b09ce3a817e2cbb23aabc07f86a3abc664c613cd283ce7a03541e95"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9235be95662559141934fced8197de6fee8c58870f36756b0584424b6d708393"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a72e00826a2b032dda3eb25aa3e3579c6d6773d22d8446089a57a123481cc46c"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ab095edf1d840a6a6a4307e1a5b907a299a94e7b90e75436ee770b8c35d22a25"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b79c63d29101cbaa53a517683557bb550462394fb91044cc5998dd2acff7340"}, - {file = "rpds_py-0.15.2-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:911e600e798374c0d86235e7ef19109cf865d1336942d398ff313375a25a93ba"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3cd61e759c4075510052d1eca5cddbd297fe1164efec14ef1fce3f09b974dfe4"}, - {file = 
"rpds_py-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d2ae79f31da5143e020a8d4fc74e1f0cbcb8011bdf97453c140aa616db51406"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e99d6510c8557510c220b865d966b105464740dcbebf9b79ecd4fbab30a13d9"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c43e1b89099279cc03eb1c725c5de12af6edcd2f78e2f8a022569efa639ada3"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac7187bee72384b9cfedf09a29a3b2b6e8815cc64c095cdc8b5e6aec81e9fd5f"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3423007fc0661827e06f8a185a3792c73dda41f30f3421562f210cf0c9e49569"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2974e6dff38afafd5ccf8f41cb8fc94600b3f4fd9b0a98f6ece6e2219e3158d5"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:93c18a1696a8e0388ed84b024fe1a188a26ba999b61d1d9a371318cb89885a8c"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd0841a586b7105513a7c8c3d5c276f3adc762a072d81ef7fae80632afad1e"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:709dc11af2f74ba89c68b1592368c6edcbccdb0a06ba77eb28c8fe08bb6997da"}, - {file = "rpds_py-0.15.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:fc066395e6332da1e7525d605b4c96055669f8336600bef8ac569d5226a7c76f"}, - {file = "rpds_py-0.15.2.tar.gz", hash = "sha256:373b76eeb79e8c14f6d82cb1d4d5293f9e4059baec6c1b16dca7ad13b6131b39"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = 
"rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = 
"rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = 
"rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = 
"rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = 
"rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = 
"rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, ] [[package]] @@ -3486,71 +3575,71 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.23" +version = "2.0.27" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:638c2c0b6b4661a4fd264f6fb804eccd392745c5887f9317feb64bb7cb03b3ea"}, - 
{file = "SQLAlchemy-2.0.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3b5036aa326dc2df50cba3c958e29b291a80f604b1afa4c8ce73e78e1c9f01d"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787af80107fb691934a01889ca8f82a44adedbf5ef3d6ad7d0f0b9ac557e0c34"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c14eba45983d2f48f7546bb32b47937ee2cafae353646295f0e99f35b14286ab"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0666031df46b9badba9bed00092a1ffa3aa063a5e68fa244acd9f08070e936d3"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89a01238fcb9a8af118eaad3ffcc5dedaacbd429dc6fdc43fe430d3a941ff965"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win32.whl", hash = "sha256:cabafc7837b6cec61c0e1e5c6d14ef250b675fa9c3060ed8a7e38653bd732ff8"}, - {file = "SQLAlchemy-2.0.23-cp310-cp310-win_amd64.whl", hash = "sha256:87a3d6b53c39cd173990de2f5f4b83431d534a74f0e2f88bd16eabb5667e65c6"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d5578e6863eeb998980c212a39106ea139bdc0b3f73291b96e27c929c90cd8e1"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62d9e964870ea5ade4bc870ac4004c456efe75fb50404c03c5fd61f8bc669a72"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c80c38bd2ea35b97cbf7c21aeb129dcbebbf344ee01a7141016ab7b851464f8e"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75eefe09e98043cff2fb8af9796e20747ae870c903dc61d41b0c2e55128f958d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd45a5b6c68357578263d74daab6ff9439517f87da63442d244f9f23df56138d"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:a86cb7063e2c9fb8e774f77fbf8475516d270a3e989da55fa05d08089d77f8c4"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win32.whl", hash = "sha256:b41f5d65b54cdf4934ecede2f41b9c60c9f785620416e8e6c48349ab18643855"}, - {file = "SQLAlchemy-2.0.23-cp311-cp311-win_amd64.whl", hash = "sha256:9ca922f305d67605668e93991aaf2c12239c78207bca3b891cd51a4515c72e22"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0f7fb0c7527c41fa6fcae2be537ac137f636a41b4c5a4c58914541e2f436b45"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7c424983ab447dab126c39d3ce3be5bee95700783204a72549c3dceffe0fc8f4"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f508ba8f89e0a5ecdfd3761f82dda2a3d7b678a626967608f4273e0dba8f07ac"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6463aa765cf02b9247e38b35853923edbf2f6fd1963df88706bc1d02410a5577"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e599a51acf3cc4d31d1a0cf248d8f8d863b6386d2b6782c5074427ebb7803bda"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd54601ef9cc455a0c61e5245f690c8a3ad67ddb03d3b91c361d076def0b4c60"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win32.whl", hash = "sha256:42d0b0290a8fb0165ea2c2781ae66e95cca6e27a2fbe1016ff8db3112ac1e846"}, - {file = "SQLAlchemy-2.0.23-cp312-cp312-win_amd64.whl", hash = "sha256:227135ef1e48165f37590b8bfc44ed7ff4c074bf04dc8d6f8e7f1c14a94aa6ca"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:14aebfe28b99f24f8a4c1346c48bc3d63705b1f919a24c27471136d2f219f02d"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e983fa42164577d073778d06d2cc5d020322425a509a08119bdcee70ad856bf"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7e0dc9031baa46ad0dd5a269cb7a92a73284d1309228be1d5935dac8fb3cae24"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5f94aeb99f43729960638e7468d4688f6efccb837a858b34574e01143cf11f89"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:63bfc3acc970776036f6d1d0e65faa7473be9f3135d37a463c5eba5efcdb24c8"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win32.whl", hash = "sha256:f48ed89dd11c3c586f45e9eec1e437b355b3b6f6884ea4a4c3111a3358fd0c18"}, - {file = "SQLAlchemy-2.0.23-cp37-cp37m-win_amd64.whl", hash = "sha256:1e018aba8363adb0599e745af245306cb8c46b9ad0a6fc0a86745b6ff7d940fc"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:64ac935a90bc479fee77f9463f298943b0e60005fe5de2aa654d9cdef46c54df"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c4722f3bc3c1c2fcc3702dbe0016ba31148dd6efcd2a2fd33c1b4897c6a19693"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af79c06825e2836de21439cb2a6ce22b2ca129bad74f359bddd173f39582bf5"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:683ef58ca8eea4747737a1c35c11372ffeb84578d3aab8f3e10b1d13d66f2bc4"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d4041ad05b35f1f4da481f6b811b4af2f29e83af253bf37c3c4582b2c68934ab"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aeb397de65a0a62f14c257f36a726945a7f7bb60253462e8602d9b97b5cbe204"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win32.whl", hash = "sha256:42ede90148b73fe4ab4a089f3126b2cfae8cfefc955c8174d697bb46210c8306"}, - {file = "SQLAlchemy-2.0.23-cp38-cp38-win_amd64.whl", hash = "sha256:964971b52daab357d2c0875825e36584d58f536e920f2968df8d581054eada4b"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:616fe7bcff0a05098f64b4478b78ec2dfa03225c23734d83d6c169eb41a93e55"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e680527245895aba86afbd5bef6c316831c02aa988d1aad83c47ffe92655e74"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9585b646ffb048c0250acc7dad92536591ffe35dba624bb8fd9b471e25212a35"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4895a63e2c271ffc7a81ea424b94060f7b3b03b4ea0cd58ab5bb676ed02f4221"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc1d21576f958c42d9aec68eba5c1a7d715e5fc07825a629015fe8e3b0657fb0"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:967c0b71156f793e6662dd839da54f884631755275ed71f1539c95bbada9aaab"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win32.whl", hash = "sha256:0a8c6aa506893e25a04233bc721c6b6cf844bafd7250535abb56cb6cc1368884"}, - {file = "SQLAlchemy-2.0.23-cp39-cp39-win_amd64.whl", hash = "sha256:f3420d00d2cb42432c1d0e44540ae83185ccbbc67a6054dcc8ab5387add6620b"}, - {file = "SQLAlchemy-2.0.23-py3-none-any.whl", hash = "sha256:31952bbc527d633b9479f5f81e8b9dfada00b91d6baba021a869095f1a97006d"}, - {file = "SQLAlchemy-2.0.23.tar.gz", hash = "sha256:c1bda93cbbe4aa2aa0aa8655c5aeda505cd219ff3e8da91d1d329e143e4aff69"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", 
hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, + {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, + {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, ] [package.dependencies] greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} -typing-extensions = ">=4.2.0" +typing-extensions = ">=4.6.0" [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] @@ -3560,7 +3649,7 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)"] mysql = ["mysqlclient (>=1.4.0)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=8)"] +oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 
(>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] @@ -3570,7 +3659,7 @@ postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy-json" @@ -3698,14 +3787,14 @@ files = [ [[package]] name = "traitlets" -version = "5.14.0" +version = "5.14.1" description = "Traitlets Python configuration system" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, - {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, + {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, ] [package.extras] @@ -3714,14 +3803,14 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-python-dateutil" -version = "2.8.19.14" +version = "2.8.19.20240106" description = "Typing stubs for python-dateutil" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.8.19.14.tar.gz", hash = "sha256:1f4f10ac98bb8b16ade9dbee3518d9ace017821d94b057a425b069f834737f4b"}, - {file = "types_python_dateutil-2.8.19.14-py3-none-any.whl", hash = "sha256:f977b8de27787639986b4e28963263fd0e5158942b3ecef91b9335c130cb1ce9"}, + {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, + {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = 
"sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, ] [[package]] @@ -3753,14 +3842,14 @@ files = [ [[package]] name = "types-waitress" -version = "2.1.4.9" +version = "2.1.4.20240106" description = "Typing stubs for waitress" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-waitress-2.1.4.9.tar.gz", hash = "sha256:7105eb78110c0c123c9bed7c8eb41c040490d6b0d019bd53add5f9406bba10f1"}, - {file = "types_waitress-2.1.4.9-py3-none-any.whl", hash = "sha256:ecc432cad266d1f434008bd790eb63554808a60133a772331f67454a4960255c"}, + {file = "types-waitress-2.1.4.20240106.tar.gz", hash = "sha256:65a7240a0771032b2aa073d09f63020aa594c7d84e05b6fefe354ef6f2c47fc2"}, + {file = "types_waitress-2.1.4.20240106-py3-none-any.whl", hash = "sha256:0a608efb7769cff76affa2c9173e5081be95b5dc137677e43fbd826bbf333fe4"}, ] [[package]] @@ -3854,39 +3943,41 @@ testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = 
"watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = 
"watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = 
"sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -3894,14 +3985,14 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.12" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" category = "dev" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, - {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = 
"wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] @@ -3969,14 +4060,14 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "widgetsnbextension" -version = "4.0.9" +version = "4.0.10" description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.9-py3-none-any.whl", hash = "sha256:91452ca8445beb805792f206e560c1769284267a30ceb1cec9f5bcc887d15175"}, - {file = "widgetsnbextension-4.0.9.tar.gz", hash = "sha256:3c1f5e46dc1166dfd40a42d685e6a51396fd34ff878742a3e47c6f0cc4a2a385"}, + {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, + {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, ] [[package]] @@ -4078,4 +4169,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "345790662c5bcdbe8b1e4ab4239ab35b3a528be2b0c46db2d2dcdc0276e26cf7" +content-hash = "79f71b90af5571b195f90783411304f0617b43904fc28186b5b3cd37b073b102" diff --git a/pyproject.toml b/pyproject.toml index 988b3077..5bd6da1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,6 @@ repository = "https://github.com/AI-READI/pyfairdatatools" keywords = [] classifiers = [ - # TODO: update this list to match your application: https://pypi.org/pypi?%3Aaction=list_classifiers "Development Status :: 1 - Planning", "Intended Audience :: Developers", "License :: OSI Approved", @@ -24,7 +23,7 @@ classifiers = [ "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.10", ] @@ -55,6 +54,10 @@ redis = "^5.0.1" # Auth flask-bcrypt = "^1.0.1" 
+cryptography = "^3.4.8" +bcrypt = "^4.1.0" +paramiko = "^3.4.0" +cffi = "^1.16.0" pyjwt = "^2.8.0" email-validator = "^2.0.0.post2" From 28089fa0fa146fd070801c72168bedd143540a6c Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 16 Feb 2024 17:45:31 -0800 Subject: [PATCH 424/505] =?UTF-8?q?=E2=9C=A8feat:=20redcap=20api=20updates?= =?UTF-8?q?,=20dashboard=20wip?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 68 ++-- apis/redcap.py | 364 ++++++++++++------ app.py | 45 ++- dev-docker-compose.yaml | 2 +- model/__init__.py | 8 +- model/study.py | 8 +- ...roject_dashboard.py => study_dashboard.py} | 61 +-- ..._redcap_project_api.py => study_redcap.py} | 51 +-- modules/etl/transforms/redcap_transform.py | 6 +- sql/init.sql | 95 +++-- sql/init_timezones.sql | 85 ++-- sql/specific_tables.sql | 26 +- 12 files changed, 486 insertions(+), 333 deletions(-) rename model/{study_redcap_project_dashboard.py => study_dashboard.py} (58%) rename model/{study_redcap_project_api.py => study_redcap.py} (53%) diff --git a/apis/dashboard.py b/apis/dashboard.py index 9f9c3a42..f5791da5 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -185,10 +185,10 @@ class RedcapProjectDashboards(Resource): def get(self, study_id: int): """Get all REDCap project dashboards""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("viewer", study): + if not is_granted("view", study): return "Access denied, you can not modify", 403 - redcap_project_dashboards_query = ( - model.StudyRedcapProjectDashboard.query.filter_by(study=study) + redcap_project_dashboards_query = model.StudyDashboard.query.filter_by( + study=study ) redcap_project_dashboards: List[Dict[str, Any]] = [ redcap_project_dashboard.to_dict() @@ -206,7 +206,7 @@ class AddRedcapProjectDashboard(Resource): def post(self, study_id: int): """Create REDCap project dashboard""" study = model.Study.query.get(study_id) - if is_granted("add_dashboard", 
study): + if not is_granted("add_dashboard", study): return "Access denied, you can not modify", 403 # Schema validation schema = { @@ -286,14 +286,14 @@ def post(self, study_id: int): {data['dashboard_name']}""", 400, ) - connect_redcap_project_dashboard_data = ( - model.StudyRedcapProjectDashboard.from_data(study, data) + connect_redcap_project_dashboard_data = model.StudyDashboard.from_data( + study, data ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 @@ -306,18 +306,18 @@ class RedcapProjectDashboardConnector(Resource): def get(self, study_id: int): """Get REDCap project dashboard connector""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("viewer", study): + if not is_granted("view", study): return "Access denied, you can not get this dashboard", 403 # Get Dashboard Connector dashboard_id = dashboard_parser.parse_args()["dashboard_id"] redcap_project_dashboard_connector_query: Any = model.db.session.query( - model.StudyRedcapProjectDashboard + model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[str, Any] = ( - redcap_project_dashboard_connector_query.to_dict() - ) + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() return redcap_project_dashboard_connector, 201 @@ -332,7 +332,7 @@ def get(self, study_id: int): """Get REDCap project dashboard""" model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) - if is_granted("viewer", study): + if not is_granted("view", study): return "Access denied, you can not get this dashboard", 403 # Get Dashboard @@ -347,17 +347,17 @@ def get(self, study_id: int): return 
cached_redcap_project_dashboard, 201 redcap_project_dashboard_query: Any = model.db.session.query( - model.StudyRedcapProjectDashboard + model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Get REDCap Project project_id = redcap_project_dashboard["project_id"] - redcap_project_view_query: Any = model.db.session.query( - model.StudyRedcapProjectApi - ).get(project_id) + redcap_project_view_query: Any = model.db.session.query(model.StudyRedcap).get( + project_id + ) redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() # Set report_ids for ETL @@ -367,9 +367,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -418,7 +418,7 @@ class EditRedcapProjectDashboard(Resource): def put(self, study_id: int): """Update REDCap project dashboard""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("update_dashboard", study): + if not is_granted("update_dashboard", study): return "Access denied, you can not modify this dashboard", 403 # Schema validation schema = { @@ -509,17 +509,15 @@ def put(self, study_id: int): dashboard_id = data["dashboard_id"] - redcap_project_dashboard_query = model.StudyRedcapProjectDashboard.query.get( - dashboard_id - ) + redcap_project_dashboard_query = model.StudyDashboard.query.get(dashboard_id) if redcap_project_dashboard_query is None: return "An error occurred while updating the dashboard", 500 redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - 
redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") @@ -536,13 +534,11 @@ class DeleteRedcapProjectDashboard(Resource): def delete(self, study_id: int): """Delete REDCap project dashboard""" study = model.Study.query.get(study_id) - if is_granted("delete_dashboard", study): + if not is_granted("delete_dashboard", study): return "Access denied, you can not delete this redcap project", 403 dashboard_id = dashboard_parser.parse_args()["dashboard_id"] - model.StudyRedcapProjectDashboard.query.filter_by( - dashboard_id=dashboard_id - ).delete() + model.StudyDashboard.query.filter_by(dashboard_id=dashboard_id).delete() model.db.session.commit() return 204 diff --git a/apis/redcap.py b/apis/redcap.py index c1551e1b..243b464a 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -3,7 +3,7 @@ from typing import Any, Union from flask import request -from flask_restx import Namespace, Resource, fields, reqparse +from flask_restx import Namespace, Resource, fields from jsonschema import ValidationError, validate import model @@ -16,78 +16,64 @@ "RedcapProjectAPI", { "study_id": fields.String(required=True, description="Study ID"), - "project_id": fields.String( - required=True, description="REDCap project ID (pid)" - ), - "project_title": fields.String( - required=True, description="REDCap project title" - ), - "project_api_url": fields.String( - required=True, description="REDCap project API url" - ), - "project_api_active": fields.Boolean( + "id": fields.String(required=True, description="REDCap project ID"), + "title": fields.String(required=True, description="REDCap project title"), + "api_pid": fields.String(required=True, description="REDCap project PID"), + "api_url": fields.String(required=True, description="REDCap project API url"), + "api_active": fields.Boolean( required=True, 
description="REDCap project is active" ), }, ) -redcap_project_api_model = api.model( +redcap_api_model = api.model( "RedcapProjectAPI", { "study_id": fields.String(required=True, description="Study ID"), - "project_id": fields.String( - required=True, description="REDCap project ID (pid)" - ), - "project_title": fields.String( - required=True, description="REDCap project title" - ), - "project_api_key": fields.String( - required=True, description="REDCap project API key" - ), - "project_api_url": fields.String( - required=True, description="REDCap project API url" - ), - "project_api_active": fields.Boolean( + "id": fields.String(required=True, description="REDCap project ID"), + "title": fields.String(required=True, description="REDCap project title"), + "api_pid": fields.String(required=True, description="REDCap project PID"), + "api_key": fields.String(required=True, description="REDCap project API key"), + "api_url": fields.String(required=True, description="REDCap project API url"), + "api_active": fields.Boolean( required=True, description="REDCap project is active" ), }, ) -project_parser = reqparse.RequestParser() -project_parser.add_argument("project_id", type=str, help="REDCap project ID (pid)") +# project_parser = reqparse.RequestParser() +# project_parser.add_argument("api_pid", type=str, help="REDCap project ID (pid)") -@api.route("/study//redcap/all") -class RedcapProjectAPIs(Resource): - @api.doc("redcap_project_apis") +@api.route("/study//redcap") +class RedcapProjectAPILink(Resource): + @api.doc("Get all REDCap project API links") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model, as_list=True) - def get(self, study_id: int): + def get(self, study_id: str): """Get all REDCap project API links""" study = model.Study.query.get(study_id) - if is_granted("viewer", study): + if not is_granted("view", study): return ( "Access denied, you can not view the redcap projects for this study", 403, 
) - redcap_project_views = model.StudyRedcapProjectApi.query.filter_by(study=study) + redcap_project_views = model.StudyRedcap.query.filter_by(study=study) redcap_project_views = [ redcap_project_view.to_dict() for redcap_project_view in redcap_project_views ] return redcap_project_views, 201 - -@api.route("/study//redcap/add") -class AddRedcapProjectAPI(Resource): + @api.doc("Create a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_view_model) - def post(self, study_id: int): + @api.marshal_with(redcap_api_model) + def post(self, study_id: str): """Create REDCap project API link""" study = model.Study.query.get(study_id) - if is_granted("add_redcap", study): + if not is_granted("add_redcap", study): return "Access denied, you can not create a redcap project", 403 # Schema validation data: Union[Any, dict] = request.json @@ -95,18 +81,18 @@ def post(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ - "project_title", - "project_id", - "project_api_url", - "project_api_key", - "project_api_active", + "title", + "api_pid", + "api_url", + "api_key", + "api_active", ], "properties": { - "project_title": {"type": "string", "minLength": 1}, - "project_id": {"type": "string", "minLength": 5}, - "project_api_url": {"type": "string", "minLength": 1}, - "project_api_key": {"type": "string", "minLength": 32}, - "project_api_active": {"type": "boolean"}, + "title": {"type": "string", "minLength": 1}, + "api_pid": {"type": "string", "minLength": 5}, + "api_url": {"type": "string", "minLength": 1}, + "api_key": {"type": "string", "minLength": 32}, + "api_active": {"type": "boolean"}, }, } @@ -115,73 +101,145 @@ def post(self, study_id: int): except ValidationError as e: return e.message, 400 - if len(data["project_title"]) < 1: + if len(data["title"]) < 1: return ( - f"""redcap project_title is required for redcap access: - {data['project_title']}""", + f"""redcap 
title is required for redcap access: + {data['title']}""", 400, ) - if len(data["project_id"]) < 1: + if len(data["api_pid"]) < 1: return ( - f"""redcap project_id is required for redcap access: - {data['project_id']}""", + f"""redcap api_pid is required for redcap access: + {data['api_pid']}""", 400, ) - if len(data["project_api_url"]) < 1: + if len(data["api_url"]) < 1: return ( - f"""redcap project_api_url is required for redcap access: - {data['project_api_url']}""", + f"""redcap api_url is required for redcap access: + {data['api_url']}""", 400, ) - if len(data["project_api_key"]) < 1: + if len(data["api_key"]) < 1: return ( - f"""redcap project_api_key is required for redcap access: - {data['project_api_key']}""", + f"""redcap api_key is required for redcap access: + {data['api_key']}""", 400, ) - if not isinstance(data["project_api_active"], bool): + if not isinstance(data["api_active"], bool): return ( - f"""redcap project_api_active is required for redcap access: - {data['project_api_active']}""", + f"""redcap api_active is required for redcap access: + {data['api_active']}""", 400, ) - add_redcap_project_api = model.StudyRedcapProjectApi.from_data(study, data) - model.db.session.add(add_redcap_project_api) + add_redcap_api = model.StudyRedcap.from_data(study, data) + model.db.session.add(add_redcap_api) model.db.session.commit() - add_redcap_project_api = add_redcap_project_api.to_dict() - return add_redcap_project_api, 201 + add_redcap_api = add_redcap_api.to_dict() + return add_redcap_api, 201 -@api.route("/study//redcap") +# @api.route("/study//redcap/add") +# class AddRedcapProjectAPI(Resource): +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_view_model) +# def post(self, study_id: int): +# """Create REDCap project API link""" +# study = model.Study.query.get(study_id) +# if not is_granted("add_redcap", study): +# return "Access denied, you can not create a redcap project", 403 +# # Schema 
validation +# data: Union[Any, dict] = request.json +# schema = { +# "type": "object", +# "additionalProperties": False, +# "required": [ +# "title", +# "api_pid", +# "api_url", +# "api_key", +# "api_active", +# ], +# "properties": { +# "title": {"type": "string", "minLength": 1}, +# "api_pid": {"type": "string", "minLength": 5}, +# "api_url": {"type": "string", "minLength": 1}, +# "api_key": {"type": "string", "minLength": 32}, +# "api_active": {"type": "boolean"}, +# }, +# } + +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 + +# if len(data["title"]) < 1: +# return ( +# f"""redcap title is required for redcap access: +# {data['title']}""", +# 400, +# ) +# if len(data["api_pid"]) < 1: +# return ( +# f"""redcap api_pid is required for redcap access: +# {data['api_pid']}""", +# 400, +# ) +# if len(data["api_url"]) < 1: +# return ( +# f"""redcap api_url is required for redcap access: +# {data['api_url']}""", +# 400, +# ) +# if len(data["api_key"]) < 1: +# return ( +# f"""redcap api_key is required for redcap access: +# {data['api_key']}""", +# 400, +# ) +# if not isinstance(data["api_active"], bool): +# return ( +# f"""redcap api_active is required for redcap access: +# {data['api_active']}""", +# 400, +# ) + +# add_redcap_api = model.StudyRedcap.from_data(study, data) +# model.db.session.add(add_redcap_api) +# model.db.session.commit() +# add_redcap_api = add_redcap_api.to_dict() +# return add_redcap_api, 201 + + +@api.route("/study//redcap/") class RedcapProjectAPI(Resource): - @api.doc(parser=project_parser) + # Get a REDCap API Link + @api.doc("Get a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) - def get(self, study_id: int): + def get(self, study_id: str, redcap_id: str): """Get REDCap project API link""" study = model.db.session.query(model.Study).get(study_id) - if is_granted("viewer", study): + if not 
is_granted("view", study): return "Access denied, you can not get this redcap project", 403 - project_id = project_parser.parse_args()["project_id"] - redcap_project_view: Any = model.db.session.query( - model.StudyRedcapProjectApi - ).get(project_id) + redcap_project_view: Any = model.db.session.query(model.StudyRedcap).get( + redcap_id + ) redcap_project_view = redcap_project_view.to_dict() return redcap_project_view, 201 - -@api.route("/study//redcap/edit") -class EditRedcapProjectAPI(Resource): - @api.doc(parser=project_parser) + # Update REDCap API Link + @api.doc("Update a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) - def put(self, study_id: int): + def put(self, study_id: str, redcap_id: str): """Update REDCap project API link""" study = model.Study.query.get(study_id) - if is_granted("update_redcap", study): + if not is_granted("update_redcap", study): return "Access denied, you can not modify this redcap project", 403 # Schema validation data: Union[Any, dict] = request.json @@ -189,16 +247,16 @@ def put(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ - "project_id", - "project_title", - "project_api_url", - "project_api_active", + "title", + "api_pid", + "api_url", + "api_active", ], "properties": { - "project_id": {"type": "string", "minLength": 1, "maxLength": 12}, - "project_title": {"type": "string", "minLength": 1}, - "project_api_url": {"type": "string", "minLength": 1}, - "project_api_active": {"type": "boolean"}, + "title": {"type": "string", "minLength": 1}, + "api_pid": {"type": "string", "minLength": 5}, + "api_url": {"type": "string", "minLength": 1}, + "api_active": {"type": "boolean"}, }, } try: @@ -206,51 +264,131 @@ def put(self, study_id: int): except ValidationError as e: return e.message, 400 - if len(data["project_id"]) < 1: + if len(data["title"]) < 1: return ( - f"""redcap project_id is required for 
redcap access: - {data['project_id']}""", + f"""redcap title is required for redcap access: + {data['title']}""", 400, ) - if len(data["project_title"]) < 1: + if len(data["api_pid"]) < 1: return ( - f"""redcap project_title is required for redcap access: - {data['project_title']}""", + f"""redcap api_pid is required for redcap access and must be concordant: + {data['api_pid']}""", 400, ) - if len(data["project_api_url"]) < 1: + if len(data["api_url"]) < 1: return ( - f"""redcap project_api_url is required for redcap access: - {data['project_api_url']}""", + f"""redcap api_url is required for redcap access: + {data['api_url']}""", 400, ) - if not isinstance(data["project_api_active"], bool): + if not isinstance(data["api_active"], bool): return ( - f"""redcap project_api_active is required for redcap access: - {data['project_api_active']}""", + f"""redcap api_active is required for redcap access: + {data['api_active']}""", 400, ) - update_redcap_project_view = model.StudyRedcapProjectApi.query.get( - data["project_id"] - ) + + update_redcap_project_view = model.StudyRedcap.query.get(redcap_id) update_redcap_project_view.update(data) model.db.session.commit() update_redcap_project_view = update_redcap_project_view.to_dict() return update_redcap_project_view, 201 - -@api.route("/study//redcap/delete") -class DeleteRedcapProjectAPI(Resource): - @api.doc(parser=project_parser) + # Delete REDCap API Link + @api.doc("Delete a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_view_model) - def delete(self, study_id: int): + def delete(self, study_id: str, redcap_id: str): """Delete REDCap project API link""" study = model.Study.query.get(study_id) - if is_granted("delete_redcap", study): + if not is_granted("delete_redcap", study): return "Access denied, you can not delete this redcap project", 403 - project_id = project_parser.parse_args()["project_id"] - 
model.StudyRedcapProjectApi.query.filter_by(project_id=project_id).delete() + model.StudyRedcap.query.filter_by(id=redcap_id).delete() model.db.session.commit() return 204 + + +# @api.route("/study//redcap") +# class EditRedcapProjectAPI(Resource): +# @api.doc(parser=project_parser) +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_view_model) +# def put(self, study_id: int): +# """Update REDCap project API link""" +# study = model.Study.query.get(study_id) +# if not is_granted("update_redcap", study): +# return "Access denied, you can not modify this redcap project", 403 +# # Schema validation +# data: Union[Any, dict] = request.json +# schema = { +# "type": "object", +# "additionalProperties": False, +# "required": [ +# "api_pid", +# "title", +# "api_url", +# "api_active", +# ], +# "properties": { +# "api_pid": {"type": "string", "minLength": 1, "maxLength": 12}, +# "title": {"type": "string", "minLength": 1}, +# "api_url": {"type": "string", "minLength": 1}, +# "api_active": {"type": "boolean"}, +# }, +# } +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 + +# if len(data["api_pid"]) < 1: +# return ( +# f"""redcap api_pid is required for redcap access: +# {data['api_pid']}""", +# 400, +# ) +# if len(data["title"]) < 1: +# return ( +# f"""redcap title is required for redcap access: +# {data['title']}""", +# 400, +# ) +# if len(data["api_url"]) < 1: +# return ( +# f"""redcap api_url is required for redcap access: +# {data['api_url']}""", +# 400, +# ) +# if not isinstance(data["api_active"], bool): +# return ( +# f"""redcap api_active is required for redcap access: +# {data['api_active']}""", +# 400, +# ) +# update_redcap_project_view = model.StudyRedcap.query.get( +# data["api_pid"] +# ) +# update_redcap_project_view.update(data) +# model.db.session.commit() +# update_redcap_project_view = update_redcap_project_view.to_dict() +# return 
update_redcap_project_view, 201 + + +# @api.route("/study//redcap") +# class DeleteRedcapProjectAPI(Resource): +# @api.doc(parser=project_parser) +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_view_model) +# def delete(self, study_id: int): +# """Delete REDCap project API link""" +# study = model.Study.query.get(study_id) +# if not is_granted("delete_redcap", study): +# return "Access denied, you can not delete this redcap project", 403 +# api_pid = project_parser.parse_args()["api_pid"] +# model.StudyRedcap.query.filter_by(api_pid=api_pid).delete() +# model.db.session.commit() +# return 204 diff --git a/app.py b/app.py index db7a6ce9..5242c133 100644 --- a/app.py +++ b/app.py @@ -11,7 +11,7 @@ from flask_bcrypt import Bcrypt from flask_cors import CORS from growthbook import GrowthBook -from sqlalchemy import MetaData +from sqlalchemy import MetaData, inspect from waitress import serve import config @@ -105,16 +105,16 @@ def create_app(config_module=None, loglevel="INFO"): # CORS(app, resources={r"/*": {"origins": "*", "send_wildcard": True}}) - # @app.cli.command("create-schema") - # def create_schema(): - # """Create the database schema.""" - # engine = model.db.session.get_bind() - # metadata = MetaData() - # metadata.reflect(bind=engine) - # table_names = [table.name for table in metadata.tables.values()] - # if len(table_names) == 0: - # with engine.begin(): - # model.db.create_all() + @app.cli.command("create-schema") + def create_schema(): + """Create the database schema.""" + engine = model.db.session.get_bind() + metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + if len(table_names) == 0: + with engine.begin(): + model.db.create_all() @app.cli.command("destroy-schema") def destroy_schema(): @@ -135,12 +135,23 @@ def cycle_schema(): engine = model.db.session.get_bind() with engine.begin(): model.db.drop_all() - metadata = 
MetaData() - metadata.reflect(bind=engine) - table_names = [table.name for table in metadata.tables.values()] - if len(table_names) == 0: - with engine.begin(): - model.db.create_all() + model.db.create_all() + + @app.cli.command("inspect-schemas") + def inspect_schemas(): + """Print database schemas, tables, and columns to CLI.""" + engine = model.db.session.get_bind() + inspector = inspect(engine) + schemas = inspector.get_schema_names() + for schema in schemas: + print("-" * 32) + print(f"Schema: {schema}") + for table_name in inspector.get_table_names(schema=schema): + print(f"\n Table: {table_name}") + for column in inspector.get_columns(table_name, schema=schema): + print(f" Column: {column['name']}") + for k, v in column.items(): + print(f" {k:<16}{str(v):>16}") @app.before_request def on_before_request(): # pylint: disable = inconsistent-return-statements diff --git a/dev-docker-compose.yaml b/dev-docker-compose.yaml index 2879955f..41ec1c33 100644 --- a/dev-docker-compose.yaml +++ b/dev-docker-compose.yaml @@ -49,7 +49,7 @@ services: CACHE_PORT: 6379 CACHE_TYPE: RedisCache CACHE_URL: redis://127.0.0.1:6379 - CACHE_PREFIX: fairhub-io# + CACHE_PREFIX: $fairhub-io CACHE_TIMEOUT: 86400 CACHE_PASSWORD: development ports: diff --git a/model/__init__.py b/model/__init__.py index 9d9b5beb..0686115b 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -29,6 +29,7 @@ from .participant import Participant from .study import Study, StudyException from .study_contributor import StudyContributor +from .study_dashboard import StudyDashboard from .study_metadata.arm import Arm from .study_metadata.identifiers import Identifiers from .study_metadata.study_arm import StudyArm @@ -47,8 +48,7 @@ from .study_metadata.study_reference import StudyReference from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators from .study_metadata.study_status import StudyStatus -from .study_redcap_project_api import StudyRedcapProjectApi -from 
.study_redcap_project_dashboard import StudyRedcapProjectDashboard +from .study_redcap import StudyRedcap from .token_blacklist import TokenBlacklist from .user import User from .user_details import UserDetails @@ -94,8 +94,8 @@ "StudyLocation", "StudyOther", "StudyOverallOfficial", - "StudyRedcapProjectApi", - "StudyRedcapProjectDashboard", + "StudyRedcap", + "StudyDashboard", "StudyReference", "StudySponsorsCollaborators", "StudyStatus", diff --git a/model/study.py b/model/study.py index dae5cbe3..59e20332 100644 --- a/model/study.py +++ b/model/study.py @@ -98,12 +98,12 @@ def __init__(self): cascade="all, delete", ) # NOTE: Has not been tested - study_redcap_project_apis = db.relationship( - "StudyRedcapProjectApi", back_populates="study", cascade="all, delete" + study_redcap = db.relationship( + "StudyRedcap", back_populates="study", cascade="all, delete" ) # NOTE: Has not been tested - study_redcap_project_dashboards = db.relationship( - "StudyRedcapProjectDashboard", back_populates="study", cascade="all, delete" + study_dashboard = db.relationship( + "StudyDashboard", back_populates="study", cascade="all, delete" ) study_intervention = db.relationship( "StudyIntervention", diff --git a/model/study_redcap_project_dashboard.py b/model/study_dashboard.py similarity index 58% rename from model/study_redcap_project_dashboard.py rename to model/study_dashboard.py index 3df4c3c4..7a2dc53c 100644 --- a/model/study_redcap_project_dashboard.py +++ b/model/study_dashboard.py @@ -11,52 +11,60 @@ @dataclass -class StudyRedcapProjectDashboard(db.Model): # type: ignore +class StudyDashboard(db.Model): # type: ignore """ A Project Dashboard is associated with a REDCap Project, which is part of a study """ - __tablename__: str = "study_redcap_project_dashboard" - dashboard_id: str = db.Column(db.CHAR(36), primary_key=True) - dashboard_name: str = db.Column(db.String, nullable=False) - dashboard_modules: list[dict[str, (str | bool | int)]] = db.Column( + __tablename__: str = 
"study_dashboard" + # Columns + id: str = db.Column(db.CHAR(36), primary_key=True) + name: str = db.Column(db.String, nullable=True) + modules: list[dict[str, (str | bool | int)]] = db.Column( NestedMutableJson, nullable=True ) reports: list[dict[str, str]] = db.Column(NestedMutableJson, nullable=True) created_at: float = db.Column(db.BigInteger, nullable=False) updated_on: float = db.Column(db.BigInteger, nullable=False) - project_id: int = db.Column( - db.BigInteger, - db.ForeignKey("study_redcap_project_api.project_id", ondelete="CASCADE"), - nullable=False, - ) + # Foreign Keys study_id: str = db.Column( db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False, ) + redcap_id: int = db.Column( + db.CHAR(36), + db.ForeignKey("study_redcap.id", ondelete="CASCADE"), + nullable=False, + ) + # project_id: int = db.Column( + # db.BigInteger, + # db.ForeignKey("study_redcap.api_pid", ondelete="CASCADE"), + # nullable=True + # ) + # Relations study = db.relationship( - "Study", back_populates="study_redcap_project_dashboards", cascade="all, delete" + "Study", back_populates="study_dashboard", cascade="all, delete" ) - study_redcap_project_api = db.relationship( - "StudyRedcapProjectApi", - back_populates="study_redcap_project_dashboards", + study_redcap = db.relationship( + "StudyRedcap", + backref="study_dashboard", cascade="all, delete", ) - def __init__(self, study): + def __init__(self, study: Study): self.study = study - self.dashboard_id = str(uuid.uuid4()) + self.id = str(uuid.uuid4()) self.created_at = datetime.now(timezone.utc).timestamp() def to_dict(self) -> Dict: """Converts the study to a dictionary""" return { - "project_id": self.project_id, - "dashboard_id": self.dashboard_id, - "dashboard_name": self.dashboard_name, - "dashboard_modules": self.dashboard_modules, + "id": self.id, + "name": self.name, + "modules": self.modules, + "redcap_id": self.redcap_id, "reports": self.reports, "created_at": self.created_at, "updated_on": 
self.updated_on, @@ -65,17 +73,16 @@ def to_dict(self) -> Dict: @staticmethod def from_data(study: Study, data: Dict) -> Any: """Creates a new study from a dictionary""" - study_redcap_project_dashboard = StudyRedcapProjectDashboard(study) - study_redcap_project_dashboard.update(data) - return study_redcap_project_dashboard + study_dashboard = StudyDashboard(study) + study_dashboard.update(data) + return study_dashboard def update(self, data: Dict) -> Any: """Updates the study from a dictionary""" user_updatable_props = [ - "project_id", - "dashboard_id", - "dashboard_name", - "dashboard_modules", + "name", + "modules", + "redcap_id", "reports", ] for key, val in data.items(): diff --git a/model/study_redcap_project_api.py b/model/study_redcap.py similarity index 53% rename from model/study_redcap_project_api.py rename to model/study_redcap.py index 2f435442..8fc6e5d3 100644 --- a/model/study_redcap_project_api.py +++ b/model/study_redcap.py @@ -9,30 +9,30 @@ @dataclass -class StudyRedcapProjectApi(db.Model): # type: ignore +class StudyRedcap(db.Model): # type: ignore """ A REDCap Project API is associated a study """ - __tablename__: str = "study_redcap_project_api" - project_id: int = db.Column(db.BigInteger, primary_key=True) - project_title: str = db.Column(db.String, nullable=False) - project_api_url: str = db.Column(db.String, nullable=False) - project_api_key: str = db.Column(db.String, nullable=False) - project_api_active: bool = db.Column(db.Boolean, nullable=False) + __tablename__: str = "study_redcap" + # Columns + id: str = db.Column(db.CHAR(36), primary_key=True) + title: str = db.Column(db.String, nullable=True) + api_pid: int = db.Column(db.BigInteger, nullable=True) + api_url: str = db.Column(db.String, nullable=True) + api_key: str = db.Column(db.String, nullable=True) + api_active: bool = db.Column(db.Boolean, nullable=True) created_at: float = db.Column(db.BigInteger, nullable=False) updated_on: float = db.Column(db.BigInteger, nullable=False) - 
+ # Foreign Keys study_id: str = db.Column( db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False, ) + # Relations study = db.relationship( - "Study", back_populates="study_redcap_project_apis", cascade="all, delete" - ) - study_redcap_project_dashboards = db.relationship( - "StudyRedcapProjectDashboard", back_populates="study_redcap_project_api" + "Study", back_populates="study_redcap", cascade="all, delete" ) def __init__(self, study): @@ -43,28 +43,29 @@ def __init__(self, study): def to_dict(self) -> Dict: """Converts the study to a dictionary""" return { - "project_id": self.project_id, - "project_title": self.project_title, - "project_api_url": self.project_api_url, - "project_api_key": self.project_api_key, - "project_api_active": self.project_api_active, + "id": self.id, + "title": self.title, + "api_pid": self.api_pid, + "api_url": self.api_url, + "api_key": self.api_key, + "api_active": self.api_active, } @staticmethod def from_data(study: Study, data: Dict) -> Any: """Creates a new study from a dictionary""" - study_redcap_project_api = StudyRedcapProjectApi(study) - study_redcap_project_api.update(data) - return study_redcap_project_api + study_redcap = StudyRedcap(study) + study_redcap.update(data) + return study_redcap def update(self, data: Dict) -> Any: """Updates the study from a dictionary""" user_updatable_props = [ - "project_id", - "project_title", - "project_api_url", - "project_api_key", - "project_api_active", + "title", + "api_pid", + "api_url", + "api_key", + "api_active", ] for key, val in data.items(): if key in user_updatable_props: diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index a3bee7ec..83089d54 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -406,7 +406,7 @@ def _remap_values_by_columns( for subvalue in str(value).split(",") if len(subvalue) > 0 ] - df[column][i] = 
self.multivalue_separator.join( + df.loc[i, column] = self.multivalue_separator.join( [ value_map[subvalue] for subvalue in subvalues @@ -461,7 +461,7 @@ def _transform_values_by_column( missing_value: Any, annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - df[new_column_name] = df[column][df[column] != missing_value].apply(transform) + df[new_column_name] = df.loc[df[column] != missing_value, column].apply(transform) df[new_column_name] = df[new_column_name].fillna(missing_value) return df @@ -503,7 +503,7 @@ def _map_missing_values_by_columns( for column in columns: for i, value in enumerate(df[column]): if (len(str(value)) == 0) or (value in self.none_map.keys()): - df[column][i] = missing_value + df.loc[i, column] = missing_value else: continue diff --git a/sql/init.sql b/sql/init.sql index 99cff89a..8cbcfbe0 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -792,6 +792,53 @@ INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "stud ('00000000-0000-0000-0000-000000000002', 'Ashlynn', 'Kuhic - Towne', 'Study Chair', '00000000-0000-0000-0000-000000000001', 1693805471); /*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; +-- Dumping data for table public.version_participants: 1 rows +/*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; +INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'); +/*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; + +-- Dumping structure for table public.study_redcap +CREATE TABLE IF NOT EXISTS "study_redcap" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "api_pid": BIGINT NOT NULL, + "api_url" VARCHAR NOT NULL, + "api_key" CHAR(32) NOT NULL, + "api_active" BOOLEAN NOT NULL, + "study_id" CHAR(36) NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_redcap_study_id_fkey" FOREIGN KEY ("study_id") 
REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_redcap: 1 rows +/*!40000 ALTER TABLE "study_redcap" DISABLE KEYS */; +INSERT INTO "study_redcap" ("study_id", "id", "title", "api_pid", "api_url", "api_key", "api_active", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', 'redcap-name', '666666', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', 0, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_redcap" ENABLE KEYS */; + +-- Dumping structure for table public.study_dashboard +CREATE TABLE IF NOT EXISTS "study_dashboard" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "modules" UNKNOWN NOT NULL, + "reports" UNKNOWN NOT NULL, + "study_id" CHAR(36) NOT NULL, + "redcap_id" BIGINT NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_dashboard_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_dashboard_redcap_id_fkey" FOREIGN KEY ("redcap_id") REFERENCES "study_redcap" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); +-- Dumping data for table public.study_dashboard: 1 rows +/*!40000 ALTER TABLE "study_dashboard" DISABLE KEYS */; +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "reports", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_dashboard" ENABLE KEYS */; + -- Dumping structure for table public.study_reference CREATE TABLE IF NOT EXISTS "study_reference" ( "id" CHAR(36) NOT NULL, @@ -913,54 +960,6 @@ CREATE TABLE IF NOT EXISTS "version_participants" ( CONSTRAINT 
"version_participants_participant_id_fkey" FOREIGN KEY ("participant_id") REFERENCES "participant" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); --- Dumping data for table public.version_participants: 1 rows -/*!40000 ALTER TABLE "version_participants" DISABLE KEYS */; -INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALUES - ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; - --- Dumping structure for table public.study_redcap_project_api -CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( - "study_id" CHAR(36) NOT NULL, - "project_id" BIGINT NOT NULL, - "project_title" VARCHAR NOT NULL, - "project_api_url" VARCHAR NOT NULL, - "project_api_key" CHAR(32) NOT NULL, - "project_api_active" BOOLEAN NOT NULL, - "created_at" BIGINT NOT NULL, - "updated_on" BIGINT NOT NULL, - PRIMARY KEY ("study_id", "project_id"), - CONSTRAINT "study_redcap_project_api_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_redcap_project_api: 1 rows -/*!40000 ALTER TABLE "study_redcap_project_api" DISABLE KEYS */; -INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "project_api_active", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', 0, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); -/*!40000 ALTER TABLE "study_redcap_project_api" ENABLE KEYS */; - --- Dumping structure for table public.study_redcap_project_dashboard -CREATE TABLE IF NOT EXISTS "study_redcap_project_dashboard" ( - "study_id" CHAR(36) NOT NULL, - "project_id" BIGINT NOT NULL, - "dashboard_id" CHAR(36) NOT NULL, - "reports" UNKNOWN NOT NULL, - "dashboard_name" VARCHAR NOT NULL, - "dashboard_modules" 
UNKNOWN NOT NULL, - "created_at" BIGINT NOT NULL, - "updated_on" BIGINT NOT NULL, - PRIMARY KEY ("study_id", "project_id", "dashboard_id"), - CONSTRAINT "study_redcap_project_dashboard_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, - CONSTRAINT "study_redcap_project_dashboard_project_id_fkey" FOREIGN KEY ("project_id") REFERENCES "study_redcap_project_api" ("project_id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_redcap_project_dashboard: 1 rows -/*!40000 ALTER TABLE "study_redcap_project_dashboard" DISABLE KEYS */; -INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "reports", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', '{}', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); -/*!40000 ALTER TABLE "study_redcap_project_dashboard" ENABLE KEYS */; - - /*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index e551657d..3a5b1e2d 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -923,6 +923,49 @@ INSERT INTO "study_overall_official" ("id", "name", "affiliation", "role", "stud ('319c21f2-9441-48ec-a64c-ab839a1da2a3', 'Maiya', 'Medhurst - Marks', 'Study Chair', '00000000-0000-0000-0000-000000000002', 1694326095); /*!40000 ALTER TABLE "study_overall_official" ENABLE KEYS */; +-- Dumping structure for table public.study_redcap +CREATE TABLE IF NOT EXISTS "study_redcap" ( + "id" CHAR(36) NOT NULL, + "title" VARCHAR NOT NULL, + "api_pid": BIGINT NOT NULL, + "api_url" VARCHAR NOT NULL, + "api_key" CHAR(32) NOT NULL, + "api_active" BOOLEAN NOT NULL, + "study_id" CHAR(36) NOT NULL, + "created_at" 
BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_redcap_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_redcap: 1 rows +/*!40000 ALTER TABLE "study_redcap" DISABLE KEYS */; +INSERT INTO "study_redcap" ("study_id", "id", "title", "api_pid", "api_url", "api_key", "api_active", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', 'redcap-name', '666666', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', 0, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_redcap" ENABLE KEYS */; + +-- Dumping structure for table public.study_dashboard +CREATE TABLE IF NOT EXISTS "study_dashboard" ( + "id" CHAR(36) NOT NULL, + "name" VARCHAR NOT NULL, + "modules" UNKNOWN NOT NULL, + "reports" UNKNOWN NOT NULL, + "study_id" CHAR(36) NOT NULL, + "redcap_id" BIGINT NOT NULL, + "created_at" BIGINT NOT NULL, + "updated_on" BIGINT NOT NULL, + PRIMARY KEY ("id"), + CONSTRAINT "study_dashboard_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, + CONSTRAINT "study_dashboard_redcap_id_fkey" FOREIGN KEY ("redcap_id") REFERENCES "study_redcap" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION +); + +-- Dumping data for table public.study_dashboard: 1 rows +/*!40000 ALTER TABLE "study_dashboard" DISABLE KEYS */; +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "reports", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +/*!40000 ALTER TABLE "study_dashboard" ENABLE KEYS */; + + -- Dumping structure for table public.study_reference CREATE TABLE IF NOT EXISTS "study_reference" ( 
"id" CHAR(36) NOT NULL, @@ -1036,48 +1079,6 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "version_participants" ENABLE KEYS */; --- Dumping structure for table public.study_redcap_project_api -CREATE TABLE IF NOT EXISTS "study_redcap_project_api" ( - "study_id" CHAR(36) NOT NULL, - "project_id" BIGINT NOT NULL, - "project_title" VARCHAR NOT NULL, - "project_api_url" VARCHAR NOT NULL, - "project_api_key" CHAR(32) NOT NULL, - "project_api_active" BOOLEAN NOT NULL, - "created_at" BIGINT NOT NULL, - "updated_on" BIGINT NOT NULL, - PRIMARY KEY ("study_id", "project_id"), - CONSTRAINT "study_redcap_project_api_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_redcap_project_api: 1 rows -/*!40000 ALTER TABLE "study_redcap_project_api" DISABLE KEYS */; -INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "project_api_active", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', 'other-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA6', 0, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); -/*!40000 ALTER TABLE "study_redcap_project_api" ENABLE KEYS */; - --- Dumping structure for table public.study_redcap_project_dashboard -CREATE TABLE IF NOT EXISTS "study_redcap_project_dashboard" ( - "study_id" CHAR(36) NOT NULL, - "project_id" BIGINT NOT NULL, - "dashboard_id" CHAR(36) NOT NULL, - "reports" UNKNOWN NOT NULL, - "dashboard_name" VARCHAR NOT NULL, - "dashboard_modules" VARCHAR[] NOT NULL, - "created_at" BIGINT NOT NULL, - "updated_on" BIGINT NOT NULL, - PRIMARY KEY ("study_id", "project_id", "dashboard_id"), - CONSTRAINT "study_redcap_project_dashboard_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES 
"study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, - CONSTRAINT "study_redcap_project_dashboard_project_id_fkey" FOREIGN KEY ("project_id") REFERENCES "study_redcap_project_api" ("project_id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.study_redcap_project_dashboard: 1 rows -/*!40000 ALTER TABLE "study_redcap_project_dashboard" DISABLE KEYS */; -INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "reports", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '666666', '00000000-0000-0000-0000-000000000006', '{}', 'other-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); -/*!40000 ALTER TABLE "study_redcap_project_dashboard" ENABLE KEYS */; - - /*!40103 SET TIME_ZONE=IFNULL(@OLD_TIME_ZONE, 'system') */; /*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */; /*!40014 SET FOREIGN_KEY_CHECKS=IFNULL(@OLD_FOREIGN_KEY_CHECKS, 1) */; diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index 45f4738e..a42f87e5 100644 --- a/sql/specific_tables.sql +++ b/sql/specific_tables.sql @@ -63,19 +63,19 @@ INSERT INTO "version_participants" ("dataset_version_id", "participant_id") VALU ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000002'), ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000001'); -INSERT INTO "study_redcap_project_api" ("study_id", "project_id", "project_title", "project_api_url", "project_api_key", "project_api_active", "updated_on", "created_at") VALUES - ('00000000-0000-0000-0000-000000000001', '11111', 'ai-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA1', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - ('00000000-0000-0000-0000-000000000002', '22222', 'dev-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA2', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - 
('00000000-0000-0000-0000-000000000003', '33333', 'ops-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA3', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - ('00000000-0000-0000-0000-000000000004', '44444', 'data-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), - ('00000000-0000-0000-0000-000000000005', '55555', 'more-stuff', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'); - -INSERT INTO "study_redcap_project_dashboard" ("study_id", "project_id", "dashboard_id", "reports", "dashboard_name", "dashboard_modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', '11111', '10000000-0000-0000-0000-000000000000', '{}', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000002', '22222', '20000000-0000-0000-0000-000000000000', '{}', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000003', '33333', '30000000-0000-0000-0000-000000000000', '{}', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000004', '44444', '40000000-0000-0000-0000-000000000000', '{}', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000005', '55555', '50000000-0000-0000-0000-000000000000', '{}', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_redcap" ("study_id", "id", "title", "api_pid", "api_url", "api_key", "api_active", "updated_on", "created_at") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006', 'ai-stuff', '11111', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA1', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000002', 
'00000000-0000-0000-0000-000000000007', 'dev-stuff', '22222', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA2', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000008', 'ops-stuff', '33333', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA3', 0, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', 'data-stuff', '44444', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', 1, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), + ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', 'more-stuff', '55555', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', 1, '2023-08-13 16:23:48', '2023-08-13 16:23:49'); + +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "reports", "name", "modules", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006', '10000000-0000-0000-0000-000000000000', '{}', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000007', '20000000-0000-0000-0000-000000000000', '{}', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000008', '30000000-0000-0000-0000-000000000000', '{}', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', '40000000-0000-0000-0000-000000000000', '{}', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', '50000000-0000-0000-0000-000000000000', '{}', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER 
TABLE "dataset" ENABLE KEYS */; From 894b18aac4a0e0d9851fb0568f1f8573f68fa21f Mon Sep 17 00:00:00 2001 From: Lint Action Date: Sat, 17 Feb 2024 01:47:45 +0000 Subject: [PATCH 425/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++----------- modules/etl/transforms/redcap_transform.py | 4 ++- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index f5791da5..f7889eb2 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -291,9 +291,9 @@ def post(self, study_id: int): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[ - str, Any - ] = connect_redcap_project_dashboard_data.to_dict() + connect_redcap_project_dashboard: Dict[str, Any] = ( + connect_redcap_project_dashboard_data.to_dict() + ) return connect_redcap_project_dashboard, 201 @@ -315,9 +315,9 @@ def get(self, study_id: int): model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[ - str, Any - ] = redcap_project_dashboard_connector_query.to_dict() + redcap_project_dashboard_connector: Dict[str, Any] = ( + redcap_project_dashboard_connector_query.to_dict() + ) return redcap_project_dashboard_connector, 201 @@ -349,9 +349,9 @@ def get(self, study_id: int): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Get REDCap Project project_id = redcap_project_dashboard["project_id"] @@ -367,9 +367,9 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - 
redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( + report["report_id"] + ) # Structure REDCap ETL Config redcap_etl_config = { @@ -515,9 +515,9 @@ def put(self, study_id: int): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + update_redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_transform.py index 83089d54..b953de55 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_transform.py @@ -461,7 +461,9 @@ def _transform_values_by_column( missing_value: Any, annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - df[new_column_name] = df.loc[df[column] != missing_value, column].apply(transform) + df[new_column_name] = df.loc[df[column] != missing_value, column].apply( + transform + ) df[new_column_name] = df[new_column_name].fillna(missing_value) return df From d9e6d81441a2356561838d5cd3a3d97a821af646 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 20 Feb 2024 12:33:49 -0800 Subject: [PATCH 426/505] =?UTF-8?q?feat:=20=E2=9C=A8=20improved=20REDcap?= =?UTF-8?q?=20&=20Dashboard=20API=20structure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 449 ++++++++++++++++++++++++++++++--------- apis/redcap.py | 3 - app.py | 10 +- model/study_dashboard.py | 8 +- sql/init.sql | 7 +- sql/init_timezones.sql | 9 +- sql/specific_tables.sql | 12 +- 7 files changed, 370 insertions(+), 128 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index f7889eb2..ff109d18 100644 --- a/apis/dashboard.py +++ 
b/apis/dashboard.py @@ -5,7 +5,7 @@ from flask import request # from flask_caching import Cache -from flask_restx import Namespace, Resource, fields, reqparse +from flask_restx import Namespace, Resource, fields from jsonschema import ValidationError, validate import model @@ -73,7 +73,7 @@ required=True, readonly=True, description="Dashboard module name" ), "id": fields.String( - required=True, readonly=True, description="Dashboard module id" + required=True, readonly=True, description="Dashboard module ID" ), "report_key": fields.String( required=True, @@ -94,8 +94,17 @@ redcap_project_dashboard_model = api.model( "RedcapProjectDashboard", { - "project_id": fields.String( - required=True, readonly=True, description="REDCap project ID (pid)" + "redcap_id": fields.String( + required=True, readonly=True, description="REDCap ID" + ), + "id": fields.String( + required=True, readonly=True, description="REDCap dashboard ID" + ), + "name": fields.String( + required=True, readonly=True, description="REDCap dashboard name" + ), + "redcap_pid": fields.String( + required=True, readonly=True, description="REDCap project ID (PID)" ), "reports": fields.List( fields.Nested( @@ -105,13 +114,7 @@ description="Associated REDCap reports", ) ), - "dashboard_id": fields.String( - required=True, readonly=True, description="REDCap dashboard ID" - ), - "dashboard_name": fields.String( - required=True, readonly=True, description="REDCap dashboard name" - ), - "dashboard_modules": fields.List( + "modules": fields.List( fields.Nested( redcap_project_dashboard_module_model, required=True, @@ -128,7 +131,10 @@ required=True, readonly=True, description="Dashboard module name" ), "id": fields.String( - required=True, readonly=True, description="Dashboard module id" + required=True, readonly=True, description="Dashboard module ID" + ), + "redcap_pid": fields.String( + required=True, readonly=True, description="REDCap project ID (PID)" ), "report_key": fields.String( required=True, @@ -143,8 
+149,11 @@ redcap_project_dashboard_connector_model = api.model( "RedcapProjectDashboardConnector", { - "project_id": fields.String( - required=True, readonly=True, description="REDCap project ID (pid)" + "redcap_id": fields.String( + required=True, readonly=True, description="REDCap ID" + ), + "redcap_pid": fields.String( + required=True, readonly=True, description="REDCap project ID (PID)" ), "reports": fields.List( fields.Nested( @@ -154,13 +163,13 @@ description="Associated REDCap reports", ) ), - "dashboard_id": fields.String( + "id": fields.String( required=True, readonly=True, description="REDCap dashboard ID" ), - "dashboard_name": fields.String( + "name": fields.String( required=True, readonly=True, description="REDCap dashboard name" ), - "dashboard_modules": fields.List( + "modules": fields.List( fields.Nested( redcap_project_dashboard_module_connector_model, required=True, @@ -171,18 +180,14 @@ }, ) -# Parser -dashboard_parser = reqparse.RequestParser() -dashboard_parser.add_argument("dashboard_id", type=str, help="Dashboard ID") - -@api.route("/study//dashboard/all") +@api.route("/study//dashboard") class RedcapProjectDashboards(Resource): - @api.doc("redcap_project_dashboards") + @api.doc("Get all study dashboards") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model, as_list=True) - def get(self, study_id: int): + def get(self, study_id: str): """Get all REDCap project dashboards""" study = model.db.session.query(model.Study).get(study_id) if not is_granted("view", study): @@ -194,16 +199,14 @@ def get(self, study_id: int): redcap_project_dashboard.to_dict() for redcap_project_dashboard in redcap_project_dashboards_query ] + print(redcap_project_dashboards) return redcap_project_dashboards, 201 - -@api.route("/study//dashboard/add") -class AddRedcapProjectDashboard(Resource): - @api.doc(parser=dashboard_parser) + @api.doc("Create a new study dashboard") @api.response(200, "Success") 
@api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) - def post(self, study_id: int): + def post(self, study_id: str): """Create REDCap project dashboard""" study = model.Study.query.get(study_id) if not is_granted("add_dashboard", study): @@ -213,13 +216,15 @@ def post(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ - "project_id", + "redcap_id", + "redcap_pid", "reports", - "dashboard_name", - "dashboard_modules", + "name", + "modules", ], "properties": { - "project_id": {"type": "string", "minLength": 1}, + "redcap_id": {"type": "string", "minLength": 1}, + "redcap_pid": {"type": "string", "minLength": 1}, "reports": { "type": "array", "items": { @@ -236,8 +241,8 @@ def post(self, study_id: int): }, "minItems": 1, }, - "dashboard_name": {"type": "string", "minLength": 1}, - "dashboard_modules": { + "name": {"type": "string", "minLength": 1}, + "modules": { "type": "array", "items": { "anyOf": [ @@ -257,15 +262,22 @@ def post(self, study_id: int): }, } data: Union[Any, Dict[str, Any]] = request.json + print(data) try: validate(request.json, schema) except ValidationError as e: print("validation error") return e.message, 400 - if len(data["project_id"]) < 1: + if len(data["redcap_id"]) < 1: return ( - f"""redcap project_id is required to connect a dashboard: - {data['project_id']}""", + f"""redcap redcap_id is required to connect a dashboard: + {data['redcap_id']}""", + 400, + ) + if len(data["redcap_pid"]) < 1: + return ( + f"""redcap redcap_pid is required to connect a dashboard: + {data['redcap_pid']}""", 400, ) if len(data["reports"]) < 1: @@ -274,70 +286,167 @@ def post(self, study_id: int): {data['reports']}""", 400, ) - if len(data["dashboard_name"]) < 1: + if len(data["name"]) < 1: return ( - f"""dashboard dashboard_name is required to connect a dashboard: - {data['dashboard_name']}""", + f"""dashboard name is required to connect a dashboard: + {data['name']}""", 400, ) - if 
len(data["dashboard_modules"]) < 1: + if len(data["modules"]) < 1: return ( - f"""dashboard dashboard_modules is required to connect a dashboard: - {data['dashboard_name']}""", + f"""dashboard modules is required to connect a dashboard: + {data['modules']}""", 400, ) + connect_redcap_project_dashboard_data = model.StudyDashboard.from_data( study, data ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 -@api.route("/study//dashboard-connector") +# @api.route("/study//dashboard/add") +# class AddRedcapProjectDashboard(Resource): +# @api.doc(parser=dashboard_parser) +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_dashboard_model) +# def post(self, study_id: str): +# """Create REDCap project dashboard""" +# study = model.Study.query.get(study_id) +# if not is_granted("add_dashboard", study): +# return "Access denied, you can not modify", 403 +# # Schema validation +# schema = { +# "type": "object", +# "additionalProperties": False, +# "required": [ +# "redcap_id", +# "reports", +# "name", +# "modules", +# ], +# "properties": { +# "redcap_id": {"type": "string", "minLength": 1}, +# "reports": { +# "type": "array", +# "items": { +# "anyOf": [ +# { +# "type": "object", +# "properties": { +# "report_id": {"type": "string", "minLength": 0}, +# "report_key": {"type": "string", "minLength": 1}, +# "report_name": {"type": "string", "minLength": 1}, +# }, +# } +# ] +# }, +# "minItems": 1, +# }, +# "name": {"type": "string", "minLength": 1}, +# "modules": { +# "type": "array", +# "items": { +# "anyOf": [ +# { +# "type": "object", +# "properties": { +# "id": {"type": "string", "minLength": 1}, +# "name": {"type": "string", 
"minLength": 1}, +# "selected": {"type": "boolean"}, +# "report_key": {"type": "string", "minLength": 1}, +# }, +# } +# ] +# }, +# "minItems": 1, +# }, +# }, +# } +# data: Union[Any, Dict[str, Any]] = request.json +# try: +# validate(request.json, schema) +# except ValidationError as e: +# print("validation error") +# return e.message, 400 +# if len(data["redcap_id"]) < 1: +# return ( +# f"""redcap redcap_id is required to connect a dashboard: +# {data['redcap_id']}""", +# 400, +# ) +# if len(data["reports"]) < 1: +# return ( +# f"""redcap reports are required to connect a dashboard: +# {data['reports']}""", +# 400, +# ) +# if len(data["name"]) < 1: +# return ( +# f"""dashboard name is required to connect a dashboard: +# {data['name']}""", +# 400, +# ) +# if len(data["modules"]) < 1: +# return ( +# f"""dashboard modules is required to connect a dashboard: +# {data['name']}""", +# 400, +# ) +# connect_redcap_project_dashboard_data = model.StudyDashboard.from_data( +# study, data +# ) +# model.db.session.add(connect_redcap_project_dashboard_data) +# model.db.session.commit() +# connect_redcap_project_dashboard: Dict[str, Any] = ( +# connect_redcap_project_dashboard_data.to_dict() +# ) +# return connect_redcap_project_dashboard, 201 + + +@api.route("/study//dashboard//connector") class RedcapProjectDashboardConnector(Resource): - @api.doc(parser=dashboard_parser) + @api.doc("Get a study dashboard connector") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_connector_model) - def get(self, study_id: int): + def get(self, study_id: str, dashboard_id: str): """Get REDCap project dashboard connector""" study = model.db.session.query(model.Study).get(study_id) if not is_granted("view", study): return "Access denied, you can not get this dashboard", 403 # Get Dashboard Connector - dashboard_id = dashboard_parser.parse_args()["dashboard_id"] redcap_project_dashboard_connector_query: Any = model.db.session.query( 
model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[str, Any] = ( - redcap_project_dashboard_connector_query.to_dict() - ) + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() return redcap_project_dashboard_connector, 201 -@api.route("/study//dashboard") +@api.route("/study//dashboard/") class RedcapProjectDashboard(Resource): - @api.doc(parser=dashboard_parser) + @api.doc("Get a study dashboard") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) - def get(self, study_id: int): + def get(self, study_id: str, dashboard_id: str): """Get REDCap project dashboard""" model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) if not is_granted("view", study): return "Access denied, you can not get this dashboard", 403 - # Get Dashboard - dashboard_id = dashboard_parser.parse_args()["dashboard_id"] - # Retrieve Dashboard Redis Cache cached_redcap_project_dashboard = cache.get( f"$study_id#{study_id}$dashboard_id#{dashboard_id}" @@ -349,14 +458,14 @@ def get(self, study_id: int): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Get REDCap Project - project_id = redcap_project_dashboard["project_id"] + redcap_id = redcap_project_dashboard["redcap_id"] redcap_project_view_query: Any = model.db.session.query(model.StudyRedcap).get( - project_id + redcap_id ) redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() @@ -367,20 +476,20 @@ def get(self, study_id: int): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + 
redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { - "redcap_api_url": redcap_project_view["project_api_url"], - "redcap_api_key": redcap_project_view["project_api_key"], + "redcap_api_url": redcap_project_view["api_url"], + "redcap_api_key": redcap_project_view["api_key"], } | redcapTransformConfig redcapTransform = RedcapTransform(redcap_etl_config) # Execute Dashboard Module Transforms - for dashboard_module in redcap_project_dashboard["dashboard_modules"]: + for dashboard_module in redcap_project_dashboard["modules"]: if dashboard_module["selected"]: mergedTransform = redcapTransform.merged transform, module_etl_config = moduleTransformConfigs[ @@ -408,14 +517,11 @@ def get(self, study_id: int): return redcap_project_dashboard, 201 - -@api.route("/study//dashboard/edit") -class EditRedcapProjectDashboard(Resource): - @api.doc(parser=dashboard_parser) + @api.doc("Update a study dashboard") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) - def put(self, study_id: int): + def put(self, study_id: str, dashboard_id: str): """Update REDCap project dashboard""" study = model.db.session.query(model.Study).get(study_id) if not is_granted("update_dashboard", study): @@ -425,14 +531,16 @@ def put(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ - "project_id", + "redcap_id", + "redcap_pid", "reports", "dashboard_id", - "dashboard_name", - "dashboard_modules", + "name", + "modules", ], "properties": { - "project_id": {"type": "string", "minLength": 1}, + "redcap_id": {"type": "string", "minLength": 1}, + "redcap_pid": {"type": "string", "minLength": 1}, "reports": { "type": "array", "items": { @@ -450,8 +558,8 @@ def put(self, study_id: int): "minItems": 1, }, "dashboard_id": {"type": "string", "minLength": 1}, - "dashboard_name": {"type": "string", "minLength": 1}, - 
"dashboard_modules": { + "name": {"type": "string", "minLength": 1}, + "modules": { "type": "array", "items": { "anyOf": [ @@ -476,10 +584,16 @@ def put(self, study_id: int): except ValidationError as e: print("validation error") return e.message, 400 - if len(data["project_id"]) < 1: + if len(data["redcap_id"]) < 1: return ( - f"""redcap project_id is required to connect a dashboard: - {data['project_id']}""", + f"""redcap redcap_id is required to connect a dashboard: + {data['redcap_id']}""", + 400, + ) + if len(data["redcap_pid"]) < 1: + return ( + f"""redcap redcap_pid is required to connect a dashboard: + {data['redcap_pid']}""", 400, ) if len(data["reports"]) < 1: @@ -494,51 +608,180 @@ def put(self, study_id: int): {data['dashboard_id']}""", 400, ) - if len(data["dashboard_name"]) < 1: + if len(data["name"]) < 1: return ( - f"""dashboard dashboard_name is required to connect a dashboard: - {data['dashboard_name']}""", + f"""dashboard name is required to connect a dashboard: + {data['name']}""", 400, ) - if len(data["dashboard_modules"]) < 1: + if len(data["modules"]) < 1: return ( - f"""dashboard dashboard_modules is required to connect a dashboard: - {data['dashboard_name']}""", + f"""dashboard modules is required to connect a dashboard: + {data['name']}""", 400, ) - dashboard_id = data["dashboard_id"] - redcap_project_dashboard_query = model.StudyDashboard.query.get(dashboard_id) if redcap_project_dashboard_query is None: return "An error occurred while updating the dashboard", 500 redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") return update_redcap_project_dashboard, 201 - -@api.route("/study//dashboard/delete") -class 
DeleteRedcapProjectDashboard(Resource): - @api.doc(parser=dashboard_parser) + @api.doc("Delete a study dashboard") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) - def delete(self, study_id: int): + def delete(self, study_id: str, dashboard_id: str): """Delete REDCap project dashboard""" study = model.Study.query.get(study_id) if not is_granted("delete_dashboard", study): return "Access denied, you can not delete this redcap project", 403 - dashboard_id = dashboard_parser.parse_args()["dashboard_id"] - model.StudyDashboard.query.filter_by(dashboard_id=dashboard_id).delete() + model.StudyDashboard.query.filter_by(id=dashboard_id).delete() model.db.session.commit() return 204 + + +# @api.route("/study//dashboard/edit") +# class EditRedcapProjectDashboard(Resource): +# @api.doc(parser=dashboard_parser) +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# @api.marshal_with(redcap_project_dashboard_model) +# def put(self, study_id: str): +# """Update REDCap project dashboard""" +# study = model.db.session.query(model.Study).get(study_id) +# if not is_granted("update_dashboard", study): +# return "Access denied, you can not modify this dashboard", 403 +# # Schema validation +# schema = { +# "type": "object", +# "additionalProperties": False, +# "required": [ +# "redcap_id", +# "reports", +# "dashboard_id", +# "name", +# "modules", +# ], +# "properties": { +# "redcap_id": {"type": "string", "minLength": 1}, +# "reports": { +# "type": "array", +# "items": { +# "anyOf": [ +# { +# "type": "object", +# "properties": { +# "report_id": {"type": "string", "minLength": 0}, +# "report_key": {"type": "string", "minLength": 1}, +# "report_name": {"type": "string", "minLength": 1}, +# }, +# } +# ] +# }, +# "minItems": 1, +# }, +# "dashboard_id": {"type": "string", "minLength": 1}, +# "name": {"type": "string", "minLength": 1}, +# "modules": { +# "type": "array", +# "items": { +# "anyOf": 
[ +# { +# "type": "object", +# "properties": { +# "id": {"type": "string", "minLength": 1}, +# "name": {"type": "string", "minLength": 1}, +# "selected": {"type": "boolean"}, +# "report_key": {"type": "string", "minLength": 1}, +# }, +# } +# ] +# }, +# "minItems": 1, +# }, +# }, +# } +# data: Union[Any, Dict[str, Any]] = request.json +# try: +# validate(request.json, schema) +# except ValidationError as e: +# print("validation error") +# return e.message, 400 +# if len(data["redcap_id"]) < 1: +# return ( +# f"""redcap redcap_id is required to connect a dashboard: +# {data['redcap_id']}""", +# 400, +# ) +# if len(data["reports"]) < 1: +# return ( +# f"""redcap reports are required to connect a dashboard: +# {data['reports']}""", +# 400, +# ) +# if len(data["dashboard_id"]) < 1: +# return ( +# f"""dashboard dashboard_id is required to connect a dashboard: +# {data['dashboard_id']}""", +# 400, +# ) +# if len(data["name"]) < 1: +# return ( +# f"""dashboard name is required to connect a dashboard: +# {data['name']}""", +# 400, +# ) +# if len(data["modules"]) < 1: +# return ( +# f"""dashboard modules is required to connect a dashboard: +# {data['name']}""", +# 400, +# ) + +# dashboard_id = data["dashboard_id"] + +# redcap_project_dashboard_query = model.StudyDashboard.query.get(dashboard_id) +# if redcap_project_dashboard_query is None: +# return "An error occurred while updating the dashboard", 500 + +# redcap_project_dashboard_query.update(data) +# model.db.session.commit() +# update_redcap_project_dashboard: Dict[str, Any] = ( +# redcap_project_dashboard_query.to_dict() +# ) + +# # Clear Dashboard from Redis Cache +# cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") + +# return update_redcap_project_dashboard, 201 + + +# @api.route("/study//dashboard/delete") +# class DeleteRedcapProjectDashboard(Resource): +# @api.doc(parser=dashboard_parser) +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# 
@api.marshal_with(redcap_project_dashboard_model) +# def delete(self, study_id: str): +# """Delete REDCap project dashboard""" +# study = model.Study.query.get(study_id) +# if not is_granted("delete_dashboard", study): +# return "Access denied, you can not delete this redcap project", 403 + +# dashboard_id = dashboard_parser.parse_args()["dashboard_id"] +# model.StudyDashboard.query.filter_by(dashboard_id=dashboard_id).delete() +# model.db.session.commit() + +# return 204 diff --git a/apis/redcap.py b/apis/redcap.py index 243b464a..6e669482 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -41,9 +41,6 @@ }, ) -# project_parser = reqparse.RequestParser() -# project_parser.add_argument("api_pid", type=str, help="REDCap project ID (pid)") - @api.route("/study//redcap") class RedcapProjectAPILink(Resource): diff --git a/app.py b/app.py index 5242c133..ab51eeb4 100644 --- a/app.py +++ b/app.py @@ -133,9 +133,13 @@ def cycle_schema(): if config.FAIRHUB_DATABASE_URL.find("azure") > -1: return engine = model.db.session.get_bind() - with engine.begin(): - model.db.drop_all() - model.db.create_all() + metadata = MetaData() + metadata.reflect(bind=engine) + table_names = [table.name for table in metadata.tables.values()] + if len(table_names) == 0: + with engine.begin(): + model.db.drop_all() + model.db.create_all() @app.cli.command("inspect-schemas") def inspect_schemas(): diff --git a/model/study_dashboard.py b/model/study_dashboard.py index 7a2dc53c..c3678654 100644 --- a/model/study_dashboard.py +++ b/model/study_dashboard.py @@ -24,6 +24,7 @@ class StudyDashboard(db.Model): # type: ignore modules: list[dict[str, (str | bool | int)]] = db.Column( NestedMutableJson, nullable=True ) + redcap_pid: int = db.Column(db.BigInteger, nullable=True) reports: list[dict[str, str]] = db.Column(NestedMutableJson, nullable=True) created_at: float = db.Column(db.BigInteger, nullable=False) updated_on: float = db.Column(db.BigInteger, nullable=False) @@ -38,11 +39,6 @@ class 
StudyDashboard(db.Model): # type: ignore db.ForeignKey("study_redcap.id", ondelete="CASCADE"), nullable=False, ) - # project_id: int = db.Column( - # db.BigInteger, - # db.ForeignKey("study_redcap.api_pid", ondelete="CASCADE"), - # nullable=True - # ) # Relations study = db.relationship( "Study", back_populates="study_dashboard", cascade="all, delete" @@ -65,6 +61,7 @@ def to_dict(self) -> Dict: "name": self.name, "modules": self.modules, "redcap_id": self.redcap_id, + "redcap_pid": self.redcap_pid, "reports": self.reports, "created_at": self.created_at, "updated_on": self.updated_on, @@ -83,6 +80,7 @@ def update(self, data: Dict) -> Any: "name", "modules", "redcap_id", + "redcap_pid", "reports", ] for key, val in data.items(): diff --git a/sql/init.sql b/sql/init.sql index 8cbcfbe0..3fda8433 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -826,7 +826,8 @@ CREATE TABLE IF NOT EXISTS "study_dashboard" ( "modules" UNKNOWN NOT NULL, "reports" UNKNOWN NOT NULL, "study_id" CHAR(36) NOT NULL, - "redcap_id" BIGINT NOT NULL, + "redcap_id" CHAR(36) NOT NULL, + "redcap_pid" BIGINT NOT NULL "created_at" BIGINT NOT NULL, "updated_on" BIGINT NOT NULL, PRIMARY KEY ("id"), @@ -835,8 +836,8 @@ CREATE TABLE IF NOT EXISTS "study_dashboard" ( ); -- Dumping data for table public.study_dashboard: 1 rows /*!40000 ALTER TABLE "study_dashboard" DISABLE KEYS */; -INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "reports", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "redcap_pid", "reports", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', 12345, '{}', 
'2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_dashboard" ENABLE KEYS */; -- Dumping structure for table public.study_reference diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 3a5b1e2d..2cdcc030 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -951,21 +951,20 @@ CREATE TABLE IF NOT EXISTS "study_dashboard" ( "modules" UNKNOWN NOT NULL, "reports" UNKNOWN NOT NULL, "study_id" CHAR(36) NOT NULL, - "redcap_id" BIGINT NOT NULL, + "redcap_id" CHAR(36) NOT NULL, + "redcap_pid" BIGINT NOT NULL "created_at" BIGINT NOT NULL, "updated_on" BIGINT NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "study_dashboard_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION, CONSTRAINT "study_dashboard_redcap_id_fkey" FOREIGN KEY ("redcap_id") REFERENCES "study_redcap" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); - -- Dumping data for table public.study_dashboard: 1 rows /*!40000 ALTER TABLE "study_dashboard" DISABLE KEYS */; -INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "reports", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "redcap_pid", "reports", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', 12345, '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_dashboard" ENABLE KEYS */; - -- Dumping structure for table public.study_reference CREATE TABLE IF NOT EXISTS "study_reference" ( "id" CHAR(36) NOT NULL, diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index a42f87e5..7e008683 100644 --- 
a/sql/specific_tables.sql +++ b/sql/specific_tables.sql @@ -70,12 +70,12 @@ INSERT INTO "study_redcap" ("study_id", "id", "title", "api_pid", "api_url", "ap ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', 'data-stuff', '44444', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', 1, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', 'more-stuff', '55555', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', 1, '2023-08-13 16:23:48', '2023-08-13 16:23:49'); -INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "reports", "name", "modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006', '10000000-0000-0000-0000-000000000000', '{}', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000007', '20000000-0000-0000-0000-000000000000', '{}', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000008', '30000000-0000-0000-0000-000000000000', '{}', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', '40000000-0000-0000-0000-000000000000', '{}', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', '50000000-0000-0000-0000-000000000000', '{}', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "redcap_pid", "reports", "name", "modules", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006', '10000000-0000-0000-0000-000000000000', 1234, '{}', 
'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000007', '20000000-0000-0000-0000-000000000000', 2345, '{}', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000008', '30000000-0000-0000-0000-000000000000', 3456, '{}', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', '40000000-0000-0000-0000-000000000000', 4567, '{}', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', '50000000-0000-0000-0000-000000000000', 5678, '{}', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; From af6b55a698b6177eb3c51e54e4abb10087ed1f60 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 20 Feb 2024 20:34:20 +0000 Subject: [PATCH 427/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index ff109d18..edfd9b92 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -304,9 +304,9 @@ def post(self, study_id: str): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[ - str, Any - ] = connect_redcap_project_dashboard_data.to_dict() + connect_redcap_project_dashboard: Dict[str, Any] = ( + connect_redcap_project_dashboard_data.to_dict() + ) return connect_redcap_project_dashboard, 201 @@ -427,9 +427,9 @@ def get(self, study_id: str, dashboard_id: str): model.StudyDashboard 
).get(dashboard_id) - redcap_project_dashboard_connector: Dict[ - str, Any - ] = redcap_project_dashboard_connector_query.to_dict() + redcap_project_dashboard_connector: Dict[str, Any] = ( + redcap_project_dashboard_connector_query.to_dict() + ) return redcap_project_dashboard_connector, 201 @@ -458,9 +458,9 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Get REDCap Project redcap_id = redcap_project_dashboard["redcap_id"] @@ -476,9 +476,9 @@ def get(self, study_id: str, dashboard_id: str): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( + report["report_id"] + ) # Structure REDCap ETL Config redcap_etl_config = { @@ -627,9 +627,9 @@ def put(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + update_redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From a1078c40024138bb554eb47d7ac1236d1bc1a858 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 20 Feb 2024 16:11:25 -0800 Subject: [PATCH 428/505] =?UTF-8?q?=F0=9F=9A=A9=20feat:=20feature=20flag?= =?UTF-8?q?=20signup=20API=20endpoint=20(#43)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🚩 feat: feature flag signup API endpoint * style: 🎨 fix code style issues with Black --------- 
Co-authored-by: Lint Action --- apis/authentication.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/apis/authentication.py b/apis/authentication.py index 2200e07f..36471a8b 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -55,6 +55,11 @@ class SignUpUser(Resource): def post(self): """signs up the new users and saves data in DB""" data: Union[Any, dict] = request.json + + # Check if the signup feature is enabled + if g.gb.is_on("signup") is False or g.gb.is_on("signup") is None: + return "Signup is disabled", 403 + if os.environ.get("FLASK_ENV") != "testing": bypassed_emails = [ "test@fairhub.io", From 912d79cbc8ec755f247a36686d4a3863892e9788 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 21 Feb 2024 11:02:54 -0800 Subject: [PATCH 429/505] =?UTF-8?q?=F0=9F=99=88=20chore:=20update=20gitign?= =?UTF-8?q?ore?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 4e0a9272..9c9e87e9 100644 --- a/.gitignore +++ b/.gitignore @@ -39,5 +39,5 @@ coverage .venv # Database -postgres_data/* postgres-data/* +redis-data/* From 135d4f444bd39864aafcb4c28c4e68e1c9306511 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Wed, 21 Feb 2024 11:26:08 -0800 Subject: [PATCH 430/505] =?UTF-8?q?chore=20=F0=9F=94=A8=20replaced=20name?= =?UTF-8?q?=20with=20family/=20given=20name=20in=20contributor=20(#44)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: modified dataset name * chore: added test for changed name contributor * style: 🎨 fix code style issues with Black --------- Co-authored-by: Lint Action --- .../31703d707dfb_contributor_name_change.py | 30 +++++++ apis/dataset_metadata/dataset_contributor.py | 10 ++- model/dataset_metadata/dataset_contributor.py | 12 ++- .../test_study_dataset_metadata_api.py | 79 
++++++++++++------- tests/functional/test_study_version_api.py | 25 +++--- 5 files changed, 112 insertions(+), 44 deletions(-) create mode 100644 alembic/versions/31703d707dfb_contributor_name_change.py diff --git a/alembic/versions/31703d707dfb_contributor_name_change.py b/alembic/versions/31703d707dfb_contributor_name_change.py new file mode 100644 index 00000000..60fa9d5f --- /dev/null +++ b/alembic/versions/31703d707dfb_contributor_name_change.py @@ -0,0 +1,30 @@ +"""contributor_name_change + +Revision ID: 31703d707dfb +Revises: 0defbfc71c59 +Create Date: 2024-02-20 08:25:11.511833 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "31703d707dfb" +down_revision: Union[str, None] = "0defbfc71c59" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.drop_column("dataset_contributor", "name") + op.add_column( + "dataset_contributor", sa.Column("family_name", sa.String, nullable=True) + ) + op.add_column("dataset_contributor", sa.Column("given_name", sa.String)) + op.execute("UPDATE dataset_contributor SET given_name = 'name'") + with op.batch_alter_table("dataset_contributor") as batch_op: + batch_op.alter_column("given_name", nullable=False) diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py index 69193f66..41336079 100644 --- a/apis/dataset_metadata/dataset_contributor.py +++ b/apis/dataset_metadata/dataset_contributor.py @@ -54,10 +54,11 @@ def post(self, study_id: int, dataset_id: int): "type": "string", "minLength": 1, }, - "name": { + "given_name": { "type": "string", "minLength": 1, }, + "family_name": {"type": ["string", "null"]}, "name_identifier": { "type": "string", "minLength": 1, @@ -103,7 +104,7 @@ def post(self, study_id: int, dataset_id: int): "required": [ "contributor_type", "name_type", - "name", + 
"given_name", "affiliations", "name_identifier", "name_identifier_scheme", @@ -194,10 +195,11 @@ def post(self, study_id: int, dataset_id: int): "additionalProperties": False, "properties": { "id": {"type": "string"}, - "name": { + "given_name": { "type": "string", "minLength": 1, }, + "family_name": {"type": ["string", "null"]}, "name_identifier": { "type": "string", "minLength": 1, @@ -242,7 +244,7 @@ def post(self, study_id: int, dataset_id: int): }, "required": [ "name_type", - "name", + "given_name", "affiliations", "name_identifier", "name_identifier_scheme", diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py index 943eae5d..6ecdb0d9 100644 --- a/model/dataset_metadata/dataset_contributor.py +++ b/model/dataset_metadata/dataset_contributor.py @@ -13,7 +13,8 @@ def __init__(self, dataset): __tablename__ = "dataset_contributor" id = db.Column(db.CHAR(36), primary_key=True) - name = db.Column(db.String, nullable=False) + family_name = db.Column(db.String, nullable=True) + given_name = db.Column(db.String, nullable=False) name_type = db.Column(db.String, nullable=True) name_identifier = db.Column(db.String, nullable=False) name_identifier_scheme = db.Column(db.String, nullable=False) @@ -29,7 +30,8 @@ def __init__(self, dataset): def to_dict(self): return { "id": self.id, - "name": self.name, + "given_name": self.given_name, + "family_name": self.family_name, "name_type": self.name_type, "name_identifier": self.name_identifier, "name_identifier_scheme": self.name_identifier_scheme, @@ -43,7 +45,8 @@ def to_dict(self): def to_dict_metadata(self): return { "id": self.id, - "name": self.name, + "given_name": self.given_name, + "family_name": self.family_name, "name_type": self.name_type, "contributor_type": self.contributor_type, "creator": self.creator, @@ -56,7 +59,8 @@ def from_data(dataset, data: dict): return dataset_contributor def update(self, data: dict): - self.name = data["name"] + self.given_name = 
data["given_name"] + self.family_name = data["family_name"] self.name_type = data["name_type"] self.name_identifier = data["name_identifier"] self.name_identifier_scheme = data["name_identifier_scheme"] diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index b1c8c88e..65efcd7c 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -486,7 +486,8 @@ def test_post_dataset_contributor_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", json=[ { - "name": "Name here", + "given_name": "Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -503,14 +504,17 @@ def test_post_dataset_contributor_metadata(clients): } ], ) + # Add a one second delay to prevent duplicate timestamps sleep(1) + response_data = json.loads(response.data) assert response.status_code == 201 response_data = json.loads(response.data) pytest.global_dataset_contributor_id = response_data[0]["id"] - assert response_data[0]["name"] == "Name here" + assert response_data[0]["given_name"] == "Given Name here" + assert response_data[0]["family_name"] == "Family Name here" assert response_data[0]["name_type"] == "Personal" assert response_data[0]["name_identifier"] == "Name identifier" assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -526,7 +530,8 @@ def test_post_dataset_contributor_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", json=[ { - "name": "Admin Name here", + "given_name": "Admin Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -550,13 +555,14 @@ def test_post_dataset_contributor_metadata(clients): admin_response_data = json.loads(admin_response.data) 
pytest.global_dataset_contributor_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0]["name"] == "Admin Name here" + assert admin_response_data[0]["given_name"] == "Admin Given Name here" editor_response = _editor_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", json=[ { - "name": "Editor Name here", + "given_name": "Editor Given Name here", + "family_name": "Editor Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -578,13 +584,14 @@ def test_post_dataset_contributor_metadata(clients): editor_response_data = json.loads(editor_response.data) pytest.global_dataset_contributor_id_editor = editor_response_data[0]["id"] - assert editor_response_data[0]["name"] == "Editor Name here" + assert editor_response_data[0]["given_name"] == "Editor Given Name here" viewer_response = _viewer_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", json=[ { - "name": "Viewer Name here", + "given_name": "Viewer Given Name here", + "family_name": "Viewer Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -601,7 +608,6 @@ def test_post_dataset_contributor_metadata(clients): } ], ) - assert viewer_response.status_code == 403 @@ -686,7 +692,8 @@ def test_post_dataset_creator_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", json=[ { - "name": "Name here", + "given_name": "Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -709,7 +716,8 @@ def test_post_dataset_creator_metadata(clients): response_data = json.loads(response.data) pytest.global_dataset_creator_id = response_data[0]["id"] - assert response_data[0]["name"] == "Name here" + assert response_data[0]["given_name"] == "Given Name here" + assert 
response_data[0]["family_name"] == "Family Name here" assert response_data[0]["name_type"] == "Personal" assert response_data[0]["name_identifier"] == "Name identifier" assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -724,7 +732,8 @@ def test_post_dataset_creator_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", json=[ { - "name": "admin Name here", + "given_name": "Admin Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -747,7 +756,8 @@ def test_post_dataset_creator_metadata(clients): admin_response_data = json.loads(admin_response.data) pytest.global_dataset_creator_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0]["name"] == "admin Name here" + assert admin_response_data[0]["given_name"] == "Admin Given Name here" + assert admin_response_data[0]["family_name"] == "Family Name here" assert admin_response_data[0]["name_type"] == "Personal" assert admin_response_data[0]["name_identifier"] == "Name identifier" assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -762,7 +772,8 @@ def test_post_dataset_creator_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", json=[ { - "name": "Editor Name here", + "given_name": "Editor Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -783,7 +794,8 @@ def test_post_dataset_creator_metadata(clients): editor_response_data = json.loads(editor_response.data) pytest.global_dataset_creator_id_editor = editor_response_data[0]["id"] - assert editor_response_data[0]["name"] == "Editor Name here" + assert editor_response_data[0]["given_name"] == "Editor Given Name here" + assert editor_response_data[0]["family_name"] == "Family Name here" assert editor_response_data[0]["name_type"] == 
"Personal" assert editor_response_data[0]["name_identifier"] == "Name identifier" assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -798,7 +810,8 @@ def test_post_dataset_creator_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", json=[ { - "name": "Viewer Name here", + "given_name": "Viewer Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -857,7 +870,8 @@ def test_get_dataset_creator_metadata(clients): assert len(viewer_response_data) == 3 assert response_data[0]["id"] == pytest.global_dataset_creator_id - assert response_data[0]["name"] == "Name here" + assert response_data[0]["given_name"] == "Given Name here" + assert response_data[0]["family_name"] == "Family Name here" assert response_data[0]["name_type"] == "Personal" assert response_data[0]["name_identifier"] == "Name identifier" assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -868,7 +882,8 @@ def test_get_dataset_creator_metadata(clients): assert response_data[0]["affiliations"][0]["scheme"] == "uh" assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" assert response_data[1]["id"] == pytest.global_dataset_creator_id_admin - assert response_data[1]["name"] == "admin Name here" + assert response_data[1]["given_name"] == "Admin Given Name here" + assert response_data[1]["family_name"] == "Family Name here" assert response_data[1]["name_type"] == "Personal" assert response_data[1]["name_identifier"] == "Name identifier" assert response_data[1]["name_identifier_scheme"] == "Name Scheme ID" @@ -879,7 +894,8 @@ def test_get_dataset_creator_metadata(clients): assert response_data[1]["affiliations"][0]["scheme"] == "uh" assert response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" assert response_data[2]["id"] == pytest.global_dataset_creator_id_editor - assert response_data[2]["name"] == 
"Editor Name here" + assert response_data[2]["given_name"] == "Editor Given Name here" + assert response_data[2]["family_name"] == "Family Name here" assert response_data[2]["name_type"] == "Personal" assert response_data[2]["name_identifier"] == "Name identifier" assert response_data[2]["name_identifier_scheme"] == "Name Scheme ID" @@ -890,7 +906,8 @@ def test_get_dataset_creator_metadata(clients): assert response_data[2]["affiliations"][0]["scheme"] == "uh" assert response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["name"] == "Name here" + assert admin_response_data[0]["given_name"] == "Given Name here" + assert admin_response_data[0]["family_name"] == "Family Name here" assert admin_response_data[0]["name_type"] == "Personal" assert admin_response_data[0]["name_identifier"] == "Name identifier" assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -900,7 +917,8 @@ def test_get_dataset_creator_metadata(clients): assert admin_response_data[0]["affiliations"][0]["identifier"] == "yes" assert admin_response_data[0]["affiliations"][0]["scheme"] == "uh" assert admin_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert admin_response_data[1]["name"] == "admin Name here" + assert admin_response_data[1]["given_name"] == "Admin Given Name here" + assert admin_response_data[1]["family_name"] == "Family Name here" assert admin_response_data[1]["name_type"] == "Personal" assert admin_response_data[1]["name_identifier"] == "Name identifier" assert admin_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" @@ -910,7 +928,8 @@ def test_get_dataset_creator_metadata(clients): assert admin_response_data[1]["affiliations"][0]["identifier"] == "yes" assert admin_response_data[1]["affiliations"][0]["scheme"] == "uh" assert admin_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert admin_response_data[2]["name"] == "Editor Name here" + assert 
admin_response_data[2]["given_name"] == "Editor Given Name here" + assert admin_response_data[2]["family_name"] == "Family Name here" assert admin_response_data[2]["name_type"] == "Personal" assert admin_response_data[2]["name_identifier"] == "Name identifier" assert admin_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" @@ -921,7 +940,8 @@ def test_get_dataset_creator_metadata(clients): assert admin_response_data[2]["affiliations"][0]["scheme"] == "uh" assert admin_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["name"] == "Name here" + assert editor_response_data[0]["given_name"] == "Given Name here" + assert editor_response_data[0]["family_name"] == "Family Name here" assert editor_response_data[0]["name_type"] == "Personal" assert editor_response_data[0]["name_identifier"] == "Name identifier" assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -931,7 +951,8 @@ def test_get_dataset_creator_metadata(clients): assert editor_response_data[0]["affiliations"][0]["identifier"] == "yes" assert editor_response_data[0]["affiliations"][0]["scheme"] == "uh" assert editor_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert editor_response_data[1]["name"] == "admin Name here" + assert editor_response_data[1]["given_name"] == "Admin Given Name here" + assert editor_response_data[1]["family_name"] == "Family Name here" assert editor_response_data[1]["name_type"] == "Personal" assert editor_response_data[1]["name_identifier"] == "Name identifier" assert editor_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" @@ -941,7 +962,8 @@ def test_get_dataset_creator_metadata(clients): assert editor_response_data[1]["affiliations"][0]["identifier"] == "yes" assert editor_response_data[1]["affiliations"][0]["scheme"] == "uh" assert editor_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert editor_response_data[2]["name"] == "Editor Name 
here" + assert editor_response_data[2]["given_name"] == "Editor Given Name here" + assert editor_response_data[2]["family_name"] == "Family Name here" assert editor_response_data[2]["name_type"] == "Personal" assert editor_response_data[2]["name_identifier"] == "Name identifier" assert editor_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" @@ -952,7 +974,8 @@ def test_get_dataset_creator_metadata(clients): assert editor_response_data[2]["affiliations"][0]["scheme"] == "uh" assert editor_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[0]["name"] == "Name here" + assert viewer_response_data[0]["given_name"] == "Given Name here" + assert viewer_response_data[0]["family_name"] == "Family Name here" assert viewer_response_data[0]["name_type"] == "Personal" assert viewer_response_data[0]["name_identifier"] == "Name identifier" assert viewer_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" @@ -962,7 +985,8 @@ def test_get_dataset_creator_metadata(clients): assert viewer_response_data[0]["affiliations"][0]["identifier"] == "yes" assert viewer_response_data[0]["affiliations"][0]["scheme"] == "uh" assert viewer_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[1]["name"] == "admin Name here" + assert viewer_response_data[1]["given_name"] == "Admin Given Name here" + assert viewer_response_data[1]["family_name"] == "Family Name here" assert viewer_response_data[1]["name_type"] == "Personal" assert viewer_response_data[1]["name_identifier"] == "Name identifier" assert viewer_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" @@ -972,7 +996,8 @@ def test_get_dataset_creator_metadata(clients): assert viewer_response_data[1]["affiliations"][0]["identifier"] == "yes" assert viewer_response_data[1]["affiliations"][0]["scheme"] == "uh" assert viewer_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert 
viewer_response_data[2]["name"] == "Editor Name here" + assert viewer_response_data[2]["given_name"] == "Editor Given Name here" + assert viewer_response_data[2]["family_name"] == "Family Name here" assert viewer_response_data[2]["name_type"] == "Personal" assert viewer_response_data[2]["name_identifier"] == "Name identifier" assert viewer_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index 5a67e23a..7851bdb7 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -612,7 +612,8 @@ def test_get_version_dataset_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", json=[ { - "name": "Name here", + "given_name": "Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -633,7 +634,8 @@ def test_get_version_dataset_metadata(clients): f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", json=[ { - "name": "Name here", + "given_name": "Given Name here", + "family_name": "Family Name here", "name_type": "Personal", "name_identifier": "Name identifier", "name_identifier_scheme": "Name Scheme ID", @@ -649,7 +651,6 @@ def test_get_version_dataset_metadata(clients): } ], ) - date_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/date", json=[{"date": 20210101, "type": "Type", "information": "Info"}], @@ -831,12 +832,14 @@ def test_get_version_dataset_metadata(clients): None, ) - assert response_data["contributors"][0]["name"] == "Name here" + assert response_data["contributors"][0]["given_name"] == "Given Name here" + assert response_data["contributors"][0]["family_name"] == "Family Name here" assert response_data["contributors"][0]["name_type"] == "Personal" assert response_data["contributors"][0]["contributor_type"] == "Con Type" 
assert response_data["dates"][0]["date"] == "01-01-1970" assert response_data["dates"][0]["type"] == "Type" - assert response_data["creators"][0]["name"] == "Name here" + assert response_data["creators"][0]["given_name"] == "Given Name here" + assert response_data["creators"][0]["family_name"] == "Family Name here" assert response_data["creators"][0]["name_type"] == "Personal" assert response_data["funders"][0]["name"] == "Name" assert response_data["funders"][0]["identifier"] == "Identifier" @@ -886,12 +889,14 @@ def test_get_version_dataset_metadata(clients): assert response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" assert response_data["related_items"][0]["type"] == "Type" - assert admin_response_data["contributors"][0]["name"] == "Name here" + assert admin_response_data["contributors"][0]["given_name"] == "Given Name here" + assert admin_response_data["contributors"][0]["family_name"] == "Family Name here" assert admin_response_data["contributors"][0]["name_type"] == "Personal" assert admin_response_data["contributors"][0]["contributor_type"] == "Con Type" assert admin_response_data["dates"][0]["date"] == "01-01-1970" assert admin_response_data["dates"][0]["type"] == "Type" - assert admin_response_data["creators"][0]["name"] == "Name here" + assert admin_response_data["creators"][0]["given_name"] == "Given Name here" + assert admin_response_data["creators"][0]["family_name"] == "Family Name here" assert admin_response_data["creators"][0]["name_type"] == "Personal" assert admin_response_data["funders"][0]["name"] == "Name" assert admin_response_data["funders"][0]["identifier"] == "Identifier" @@ -955,12 +960,14 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" assert admin_response_data["related_items"][0]["type"] == "Type" - assert editor_response_data["contributors"][0]["name"] == "Name here" + assert editor_response_data["contributors"][0]["family_name"] == 
"Family Name here" + assert editor_response_data["contributors"][0]["given_name"] == "Given Name here" assert editor_response_data["contributors"][0]["name_type"] == "Personal" assert editor_response_data["contributors"][0]["contributor_type"] == "Con Type" assert editor_response_data["dates"][0]["date"] == "01-01-1970" assert editor_response_data["dates"][0]["type"] == "Type" - assert editor_response_data["creators"][0]["name"] == "Name here" + assert editor_response_data["creators"][0]["given_name"] == "Given Name here" + assert editor_response_data["creators"][0]["family_name"] == "Family Name here" assert editor_response_data["creators"][0]["name_type"] == "Personal" assert editor_response_data["funders"][0]["name"] == "Name" assert editor_response_data["funders"][0]["identifier"] == "Identifier" From 6f0a60a199d0b2db90b9a9f42f95e5f5861ec998 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Wed, 21 Feb 2024 16:28:03 -0800 Subject: [PATCH 431/505] =?UTF-8?q?feat:=20=E2=9D=87=EF=B8=8F=20=20add=20d?= =?UTF-8?q?ataset=20healthsheet=20table=20(#46)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add dataset health sheet table * style: format * style: 🎨 fix code style issues with Black * fix: added relation of healthsheet to parent table --------- Co-authored-by: Lint Action --- model/__init__.py | 2 + model/dataset.py | 6 +++ model/dataset_metadata/dataset_healthsheet.py | 50 +++++++++++++++++++ 3 files changed, 58 insertions(+) create mode 100644 model/dataset_metadata/dataset_healthsheet.py diff --git a/model/__init__.py b/model/__init__.py index 0686115b..94eca65f 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -17,6 +17,7 @@ from .dataset_metadata.dataset_de_ident_level import DatasetDeIdentLevel from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder +from 
.dataset_metadata.dataset_healthsheet import DatasetHealthsheet from .dataset_metadata.dataset_other import DatasetOther from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights @@ -67,6 +68,7 @@ "DatasetOther", "DatasetAccess", "DatasetConsent", + "DatasetHealthsheet", "DatasetDate", "DatasetDeIdentLevel", "DatasetFunder", diff --git a/model/dataset.py b/model/dataset.py index 96bc4d41..32d3ba82 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -58,6 +58,12 @@ def __init__(self, study): cascade="all, delete", uselist=False, ) + dataset_healthsheet = db.relationship( + "DatasetHealthsheet", + back_populates="dataset", + cascade="all, delete", + uselist=False, + ) dataset_date = db.relationship( "DatasetDate", back_populates="dataset", diff --git a/model/dataset_metadata/dataset_healthsheet.py b/model/dataset_metadata/dataset_healthsheet.py new file mode 100644 index 00000000..e74c9682 --- /dev/null +++ b/model/dataset_metadata/dataset_healthsheet.py @@ -0,0 +1,50 @@ +from ..db import db + + +class DatasetHealthsheet(db.Model): # type: ignore + def __init__(self, dataset): + self.dataset = dataset + + __tablename__ = "dataset_healthsheet" + + motivation = db.Column(db.JSON, nullable=False) + composition = db.Column(db.JSON, nullable=False) + collection = db.Column(db.JSON, nullable=False) + preprocessing = db.Column(db.JSON, nullable=False) + uses = db.Column(db.JSON, nullable=False) + distribution = db.Column(db.JSON, nullable=False) + maintenance = db.Column(db.JSON, nullable=False) + + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) + dataset = db.relationship("Dataset", back_populates="dataset_healthsheet") + + def to_dict(self): + return { + "motivation": self.motivation, + "composition": self.composition, + "collection": self.collection, + "preprocessing": self.preprocessing, + "uses": self.uses, + "distribution": 
self.distribution, + "maintenance": self.maintenance, + "dataset_id": self.dataset_id, + } + + @staticmethod + def from_data(dataset, data: dict): + dataset_healthsheet = DatasetHealthsheet(dataset) + dataset_healthsheet.update(data) + return dataset_healthsheet + + def update(self, data: dict): + self.motivation = data["motivation"] + self.composition = data["composition"] + # self.collection = data["collection"] + # self.preprocessing = data["preprocessing"] + # self.uses = data["uses"] + # self.distribution = data["distribution"] + # self.maintenance = data["maintenance"] + # self.dataset_id = data["dataset_id"] + self.dataset_id.touch_dataset() From 88778fcd172a30c3c79ca7ba12f62523e549c832 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Wed, 21 Feb 2024 17:25:56 -0800 Subject: [PATCH 432/505] =?UTF-8?q?feat:=20=E2=9D=87=EF=B8=8F=20add=20publ?= =?UTF-8?q?ished=20dataset=20table=20(#45)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added published dataset table * style: 🎨 fix code style issues with Black * style: format * fix: update published dataset table --------- Co-authored-by: Lint Action --- model/__init__.py | 2 ++ model/published_dataset.py | 47 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 model/published_dataset.py diff --git a/model/__init__.py b/model/__init__.py index 94eca65f..1adc44d8 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -28,6 +28,7 @@ from .invited_study_contributor import StudyInvitedContributor from .notification import Notification from .participant import Participant +from .published_dataset import PublishedDataset from .study import Study, StudyException from .study_contributor import StudyContributor from .study_dashboard import StudyDashboard @@ -60,6 +61,7 @@ "Study", "Dataset", "Participant", + "PublishedDataset", "Version", "db", "User", diff --git 
a/model/published_dataset.py b/model/published_dataset.py new file mode 100644 index 00000000..f70e8309 --- /dev/null +++ b/model/published_dataset.py @@ -0,0 +1,47 @@ +import uuid +import datetime +from datetime import timezone + +from .db import db + + +class PublishedDataset(db.Model): # type: ignore + """A published dataset is a collection of published datasets""" + + def __init__(self): + self.id = str(uuid.uuid4()) + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + + __tablename__ = "published_dataset" + + id = db.Column(db.CHAR(36), primary_key=True) + study_id = db.Column(db.String, nullable=False) + dataset_id = db.Column(db.String, nullable=False) + version_id = db.Column(db.String, nullable=False) + doi = db.Column(db.String, nullable=False) + title = db.Column(db.String, nullable=False) + description = db.Column(db.String, nullable=False) + version_title = db.Column(db.String, nullable=False) + study_title = db.Column(db.String, nullable=False) + published_metadata = db.Column(db.JSON, nullable=False) + files = db.Column(db.JSON, nullable=False) + data = db.Column(db.JSON, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + + def to_dict(self): + """Converts the published dataset to a dictionary""" + return { + "id": self.id, + "study_id": self.study_id, + "dataset_id": self.dataset_id, + "version_id": self.version_id, + "doi": self.doi, + "title": self.title, + "description": self.description, + "version_title": self.version_title, + "study_title": self.study_title, + "published_metadata": self.published_metadata, + "files": self.files, + "data": self.data, + "created_at": self.created_at, + } From 6e49067051a8b561b54d7f34810ffe141f4d5fe5 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Mon, 26 Feb 2024 17:08:14 -0800 Subject: [PATCH 433/505] =?UTF-8?q?feat:=20=E2=9D=87=EF=B8=8F=20=20add=20h?= =?UTF-8?q?ealtsheet=20endpoints=20(#47)?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add health sheet endpoints * style: 🎨 fix code style issues with Black * feat: add health sheet test functions * style: format * style: 🎨 fix code style issues with Black --------- Co-authored-by: Lint Action --- apis/__init__.py | 2 + apis/dataset_metadata/dataset_healthsheet.py | 381 +++++++++ model/dataset.py | 1 + model/dataset_metadata/dataset_healthsheet.py | 30 +- model/published_dataset.py | 2 +- .../test_study_dataset_metadata_api.py | 723 +++++++++++++++++- 6 files changed, 1128 insertions(+), 11 deletions(-) create mode 100644 apis/dataset_metadata/dataset_healthsheet.py diff --git a/apis/__init__.py b/apis/__init__.py index da3a02d0..2dba1c7d 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -17,6 +17,7 @@ from .dataset_metadata.dataset_de_ident_level import api as de_ident_level from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_funder import api as funder +from .dataset_metadata.dataset_healthsheet import api as healthsheet from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_record_keys import api as record_keys from .dataset_metadata.dataset_related_item import api as related_item @@ -61,6 +62,7 @@ "access", "alternate_identifier", "consent", + "healthsheet", "date", "de_ident_level", "description", diff --git a/apis/dataset_metadata/dataset_healthsheet.py b/apis/dataset_metadata/dataset_healthsheet.py new file mode 100644 index 00000000..f6d7c792 --- /dev/null +++ b/apis/dataset_metadata/dataset_healthsheet.py @@ -0,0 +1,381 @@ +"""API endpoints for dataset healthsheet""" + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +# +dataset_health_sheet_motivation = api.model( + "DatasetHealthSheetMotivation", + { + 
"motivation": fields.String(required=True), + }, +) +dataset_health_sheet_composition = api.model( + "DatasetHealthSheetComposition", + { + "composition": fields.String(required=True), + }, +) +dataset_health_sheet_collection = api.model( + "DatasetHealthSheetCollection", + { + "collection": fields.String(required=True), + }, +) +dataset_health_sheet_preprocessing = api.model( + "DatasetHealthSheetPreprocessing", + { + "preprocessing": fields.String(required=True), + }, +) +dataset_health_sheet_uses = api.model( + "DatasetHealthSheetUses", + { + "uses": fields.String(required=True), + }, +) +dataset_health_sheet_distribution = api.model( + "DatasetHealthSheetDistribution", + { + "distribution": fields.String(required=True), + }, +) +dataset_health_sheet_maintenance = api.model( + "DatasetHealthSheetMaintenance", + { + "maintenance": fields.String(required=True), + }, +) + + +@api.route("/study//dataset//healthsheet/motivation") +class DatasetHealthsheetMotivation(Resource): + """Dataset health sheet motivation""" + + @api.doc("health sheet motivation") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_health_sheet_motivation) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset health sheet motivation""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_health_sheet_ = dataset_.dataset_healthsheet + + return {"motivation": dataset_health_sheet_.motivation}, 200 + + @api.doc("health sheet motivation") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + """Update dataset health sheet motivation""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "motivation": {"type": 
"string"}, + }, + "required": [ + "motivation", + ], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_healthsheet.update(data) + model.db.session.commit() + return {"motivation": dataset_.dataset_healthsheet.motivation}, 200 + + +@api.route("/study//dataset//healthsheet/maintenance") +class DatasetHealthSheetMaintenance(Resource): + """Dataset health sheet maintenance""" + + @api.doc("health sheet maintenance") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_health_sheet_maintenance) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset healthsheet maintenance""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_health_sheet_ = dataset_.dataset_healthsheet + + return {"maintenance": dataset_health_sheet_.maintenance}, 200 + + @api.doc("healthSheet maintenance") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + """Update dataset health sheet maintenance""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "maintenance": {"type": "string"}, + }, + "required": [ + "maintenance", + ], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_healthsheet.update(data) + model.db.session.commit() + return {"maintenance": dataset_.dataset_healthsheet.maintenance}, 200 + + +@api.route("/study//dataset//healthsheet/composition") +class 
DatasetHealthSheetComposition(Resource): + """Dataset healthsheet composition""" + + @api.doc("health sheet composition") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_health_sheet_composition) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset health sheet composition""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_health_sheet_ = dataset_.dataset_healthsheet + + return {"composition": dataset_health_sheet_.composition}, 200 + + @api.doc("health sheet composition") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + """Update dataset health sheet composition""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "composition": {"type": "string"}, + }, + "required": [ + "composition", + ], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_healthsheet.update(data) + model.db.session.commit() + return {"composition": dataset_.dataset_healthsheet.composition}, 200 + + +@api.route("/study//dataset//healthsheet/collection") +class DatasetHealthSheetCollection(Resource): + """Dataset health sheet Resource""" + + @api.doc("health sheet collection") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_health_sheet_collection) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset health sheet collection""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_health_sheet_ = dataset_.dataset_healthsheet + + return 
{"collection": dataset_health_sheet_.collection}, 200 + + @api.doc("healthsheet collection") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + """Update dataset health sheet collection""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "collection": {"type": "string"}, + }, + "required": [ + "collection", + ], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_healthsheet.update(data) + model.db.session.commit() + return {"collection": dataset_.dataset_healthsheet.collection}, 200 + + +@api.route("/study//dataset//healthsheet/preprocessing") +class DatasetHealthSheetPreprocessing(Resource): + """Dataset health sheet preprocessing""" + + @api.doc("health sheet preprocessing") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_health_sheet_preprocessing) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset health sheet collection""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_health_sheet_ = dataset_.dataset_healthsheet + + return {"preprocessing": dataset_health_sheet_.preprocessing}, 200 + + @api.doc("healthsheet preprocessing") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + """Update dataset healthsheet preprocessing""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + 
"additionalProperties": False, + "properties": { + "preprocessing": {"type": "string"}, + }, + "required": [ + "preprocessing", + ], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_healthsheet.update(data) + model.db.session.commit() + return {"preprocessing": dataset_.dataset_healthsheet.preprocessing}, 200 + + +@api.route("/study//dataset//healthsheet/uses") +class DatasetHealthSheetUses(Resource): + """Dataset healthsheet uses Resource""" + + @api.doc("health sheet uses") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_health_sheet_uses) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset health sheet collection""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_health_sheet_ = dataset_.dataset_healthsheet + + return {"uses": dataset_health_sheet_.uses}, 200 + + @api.doc("health sheet uses") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + """Update dataset health sheet uses""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "uses": {"type": "string"}, + }, + "required": [ + "uses", + ], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_healthsheet.update(data) + model.db.session.commit() + return {"uses": dataset_.dataset_healthsheet.uses}, 200 + + +@api.route("/study//dataset//healthsheet/distribution") +class 
DatasetHealthSheetDistribution(Resource): + """Dataset health sheet distribution Resource""" + + @api.doc("health sheet distribution distribution") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_health_sheet_distribution) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset health sheet collection""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_health_sheet_ = dataset_.dataset_healthsheet + + return {"distribution": dataset_health_sheet_.distribution}, 200 + + @api.doc("healthsheet distribution") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int, dataset_id: int): + """Update dataset health sheet uses""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "distribution": {"type": "string"}, + }, + "required": [ + "distribution", + ], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_healthsheet.update(data) + model.db.session.commit() + return {"distribution": dataset_.dataset_healthsheet.distribution}, 200 diff --git a/model/dataset.py b/model/dataset.py index 32d3ba82..2770881f 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -20,6 +20,7 @@ def __init__(self, study): self.dataset_record_keys = model.DatasetRecordKeys(self) self.dataset_de_ident_level = model.DatasetDeIdentLevel(self) self.dataset_consent = model.DatasetConsent(self) + self.dataset_healthsheet = model.DatasetHealthsheet(self) self.dataset_other = model.DatasetOther(self) self.dataset_title.append(model.DatasetTitle(self)) diff --git 
a/model/dataset_metadata/dataset_healthsheet.py b/model/dataset_metadata/dataset_healthsheet.py index e74c9682..924e870f 100644 --- a/model/dataset_metadata/dataset_healthsheet.py +++ b/model/dataset_metadata/dataset_healthsheet.py @@ -4,6 +4,13 @@ class DatasetHealthsheet(db.Model): # type: ignore def __init__(self, dataset): self.dataset = dataset + self.motivation = "[]" + self.composition = "[]" + self.collection = "[]" + self.preprocessing = "[]" + self.uses = "[]" + self.distribution = "[]" + self.maintenance = "[]" __tablename__ = "dataset_healthsheet" @@ -39,12 +46,17 @@ def from_data(dataset, data: dict): return dataset_healthsheet def update(self, data: dict): - self.motivation = data["motivation"] - self.composition = data["composition"] - # self.collection = data["collection"] - # self.preprocessing = data["preprocessing"] - # self.uses = data["uses"] - # self.distribution = data["distribution"] - # self.maintenance = data["maintenance"] - # self.dataset_id = data["dataset_id"] - self.dataset_id.touch_dataset() + if "motivation" in data: + self.motivation = data["motivation"] + if "composition" in data: + self.composition = data["composition"] + if "collection" in data: + self.collection = data["collection"] + if "preprocessing" in data: + self.preprocessing = data["preprocessing"] + if "uses" in data: + self.uses = data["uses"] + if "distribution" in data: + self.distribution = data["distribution"] + if "maintenance" in data: + self.maintenance = data["maintenance"] diff --git a/model/published_dataset.py b/model/published_dataset.py index f70e8309..42061d70 100644 --- a/model/published_dataset.py +++ b/model/published_dataset.py @@ -1,5 +1,5 @@ -import uuid import datetime +import uuid from datetime import timezone from .db import db diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 65efcd7c..d66ed259 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ 
b/tests/functional/test_study_dataset_metadata_api.py @@ -1692,7 +1692,728 @@ def test_delete_dataset_description_metadata(clients): assert editor_response.status_code == 204 -# ------------------- FUNDER METADATA ------------------- # +# ------------------- DATASET HEALTHSHEET MOTIVATION METADATA ------------------- # +def test_put_healthsheet_motivation_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_motivation_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, 
so the response data should match + + +# # ------------------- DATASET HEALTHSHEET COMPOSITION METADATA ------------------- # +def test_put_healthsheet_composition_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/composition' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet composition metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert 
viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_composition_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/composition' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET COLLECTION METADATA ------------------- # +def 
test_put_healthsheet_collection_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/collection' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet collection metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_collection_metadata(clients): + """ + Given a Flask application configured for 
testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/collection' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET PREPROCESSING METADATA ------------------- # +def test_put_healthsheet_preprocessing_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the 
'/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet preprocessing metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_preprocessing_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the 
'/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# # ------------------- DATASET HEALTHSHEET USES METADATA ------------------- # +def test_put_healthsheet_uses_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the 
'/study/{study_id}/dataset/healthsheet/uses' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet uses metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_uses_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/uses' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + description metadata content + """ + 
_logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET DISTRIBUTION METADATA ------------------- # +def test_put_healthsheet_distribution_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/distribution' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet distribution metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # 
type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_distribution_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + distribution metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = 
_logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET HEALTHSHEET MAINTENANCE METADATA ------------------- # +def test_put_healthsheet_maintenance_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/healthsheet/maintenance' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet maintenance metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.put( + 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_maintenance_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + maintenance metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert 
response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + # Editor was the last successful PUT request, so the response data should match + + +# ------------------- DATASET FUNDER METADATA ------------------- # def test_post_dataset_funder_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID From 489583ba132827c65647c00573e9fb0a334fe0e1 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 17:37:04 -0800 Subject: [PATCH 434/505] =?UTF-8?q?=F0=9F=9A=9A=20chore:=20update=20path?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- caching/__init__.py => caching.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename caching/__init__.py => caching.py (78%) diff --git a/caching/__init__.py b/caching.py similarity index 78% rename from caching/__init__.py rename to caching.py index fc85c7e6..b4967ef3 
100644 --- a/caching/__init__.py +++ b/caching.py @@ -3,7 +3,7 @@ cache = Cache( config={ - key.replace(f"FAIRHUB_", ""): value + key.replace("FAIRHUB_", ""): value for key, value in config.items() if "CACHE" in key } From dde156753380c91288be76f5ccf9ccdd30ea95e6 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 17:42:04 -0800 Subject: [PATCH 435/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20remove=20old=20wor?= =?UTF-8?q?kflows?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/deploy-app-to-main-slot.yml | 67 ------------------- .../workflows/deploy-app-to-staging-slot.yml | 67 ------------------- 2 files changed, 134 deletions(-) delete mode 100644 .github/workflows/deploy-app-to-main-slot.yml delete mode 100644 .github/workflows/deploy-app-to-staging-slot.yml diff --git a/.github/workflows/deploy-app-to-main-slot.yml b/.github/workflows/deploy-app-to-main-slot.yml deleted file mode 100644 index 66d0a63b..00000000 --- a/.github/workflows/deploy-app-to-main-slot.yml +++ /dev/null @@ -1,67 +0,0 @@ -# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy -# More GitHub Actions for Azure: https://github.com/Azure/actions -# More info on Python, GitHub Actions, and Azure App Service: https://aka.ms/python-webapps-actions - -name: Build and deploy Python app to Azure Web App - api-fairhub-io - -on: - push: - branches: - - main - pull_request: - types: [opened, synchronize, reopened, closed] - branches: - - main - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python version - uses: actions/setup-python@v1 - with: - python-version: "3.10" - - - name: Create and start virtual environment - run: | - python -m venv venv - source venv/bin/activate - - - name: Install dependencies - run: pip install poetry==1.3.2 && poetry install - - # Optional: Add step to run tests here (PyTest, Django test 
suites, etc.) - - - name: Upload artifact for deployment jobs - uses: actions/upload-artifact@v2 - with: - name: python-app - path: | - . - !venv/ - - deploy: - runs-on: ubuntu-latest - needs: build - environment: - name: "main" - url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} - - steps: - - name: Download artifact from build job - uses: actions/download-artifact@v2 - with: - name: python-app - path: . - - - name: "Deploy to Azure Web App" - uses: azure/webapps-deploy@v2 - id: deploy-to-webapp - with: - app-name: "api-fairhub-io" - slot-name: "main" - publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_6C6B1227CD464BFDBC9173E57CDEEC65 }} diff --git a/.github/workflows/deploy-app-to-staging-slot.yml b/.github/workflows/deploy-app-to-staging-slot.yml deleted file mode 100644 index 9b0eaf20..00000000 --- a/.github/workflows/deploy-app-to-staging-slot.yml +++ /dev/null @@ -1,67 +0,0 @@ -# Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy -# More GitHub Actions for Azure: https://github.com/Azure/actions -# More info on Python, GitHub Actions, and Azure App Service: https://aka.ms/python-webapps-actions - -name: Build and deploy Python app to Azure Web App - api-fairhub-io - -on: - push: - branches: - - staging - pull_request: - types: [opened, synchronize, reopened, closed] - branches: - - staging - workflow_dispatch: - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - - name: Set up Python version - uses: actions/setup-python@v1 - with: - python-version: "3.10" - - - name: Create and start virtual environment - run: | - python -m venv venv - source venv/bin/activate - - - name: Install dependencies - run: pip install poetry==1.3.2 && poetry install - - # Optional: Add step to run tests here (PyTest, Django test suites, etc.) - - - name: Upload artifact for deployment jobs - uses: actions/upload-artifact@v2 - with: - name: python-app - path: | - . 
- !venv/ - - deploy: - runs-on: ubuntu-latest - needs: build - environment: - name: "staging" - url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} - - steps: - - name: Download artifact from build job - uses: actions/download-artifact@v2 - with: - name: python-app - path: . - - - name: "Deploy to Azure Web App" - uses: azure/webapps-deploy@v2 - id: deploy-to-webapp - with: - app-name: "api-fairhub-io" - slot-name: "staging" - publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_STAGING }} From f635f4f8ee52cdb53d1c7042c1dab60397a50ca1 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 17:57:10 -0800 Subject: [PATCH 436/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20fix=20build?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 42 +++++++++---------- app.py | 4 +- .../0defbfc71c59_add_identifier_to_version.py | 0 .../29e42ce4be3f_adding_license_text.py | 0 .../31703d707dfb_contributor_name_change.py | 0 ...2ac2b020c7c_delete_dataset_readme_table.py | 0 6 files changed, 22 insertions(+), 24 deletions(-) rename {alembic => dev/alembic.old}/versions/0defbfc71c59_add_identifier_to_version.py (100%) rename {alembic => dev/alembic.old}/versions/29e42ce4be3f_adding_license_text.py (100%) rename {alembic => dev/alembic.old}/versions/31703d707dfb_contributor_name_change.py (100%) rename {alembic => dev/alembic.old}/versions/72ac2b020c7c_delete_dataset_readme_table.py (100%) diff --git a/apis/dashboard.py b/apis/dashboard.py index edfd9b92..47192d36 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -3,13 +3,11 @@ from typing import Any, Dict, List, Union from flask import request - -# from flask_caching import Cache from flask_restx import Namespace, Resource, fields from jsonschema import ValidationError, validate +import caching import model -from caching import cache from modules.etl import ModuleTransform, RedcapTransform from modules.etl.config import moduleTransformConfigs, 
redcapTransformConfig @@ -304,9 +302,9 @@ def post(self, study_id: str): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 @@ -427,9 +425,9 @@ def get(self, study_id: str, dashboard_id: str): model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[str, Any] = ( - redcap_project_dashboard_connector_query.to_dict() - ) + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() return redcap_project_dashboard_connector, 201 @@ -448,7 +446,7 @@ def get(self, study_id: str, dashboard_id: str): return "Access denied, you can not get this dashboard", 403 # Retrieve Dashboard Redis Cache - cached_redcap_project_dashboard = cache.get( + cached_redcap_project_dashboard = caching.cache.get( f"$study_id#{study_id}$dashboard_id#{dashboard_id}" ) @@ -458,9 +456,9 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Get REDCap Project redcap_id = redcap_project_dashboard["redcap_id"] @@ -476,9 +474,9 @@ def get(self, study_id: str, dashboard_id: str): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -510,7 +508,7 @@ def get(self, study_id: str, dashboard_id: 
str): } # Create Dashboard Redis Cache - cache.set( + caching.cache.set( f"$study_id#{study_id}$dashboard_id#{dashboard_id}", redcap_project_dashboard, ) @@ -627,12 +625,12 @@ def put(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Clear Dashboard from Redis Cache - cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") + caching.cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") return update_redcap_project_dashboard, 201 @@ -763,7 +761,7 @@ def delete(self, study_id: str, dashboard_id: str): # ) # # Clear Dashboard from Redis Cache -# cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") +# caching.cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") # return update_redcap_project_dashboard, 201 diff --git a/app.py b/app.py index ab51eeb4..94c04970 100644 --- a/app.py +++ b/app.py @@ -14,12 +14,12 @@ from sqlalchemy import MetaData, inspect from waitress import serve +import caching import config import model from apis import api from apis.authentication import UnauthenticatedException, authentication, authorization from apis.exception import ValidationException -from caching import cache # from pyfairdatatools import __version__ @@ -65,7 +65,7 @@ def create_app(config_module=None, loglevel="INFO"): model.db.init_app(app) api.init_app(app) bcrypt.init_app(app) - cache.init_app(app) + caching.cache.init_app(app) cors_origins = [ "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string diff --git a/alembic/versions/0defbfc71c59_add_identifier_to_version.py b/dev/alembic.old/versions/0defbfc71c59_add_identifier_to_version.py similarity index 100% rename from 
alembic/versions/0defbfc71c59_add_identifier_to_version.py rename to dev/alembic.old/versions/0defbfc71c59_add_identifier_to_version.py diff --git a/alembic/versions/29e42ce4be3f_adding_license_text.py b/dev/alembic.old/versions/29e42ce4be3f_adding_license_text.py similarity index 100% rename from alembic/versions/29e42ce4be3f_adding_license_text.py rename to dev/alembic.old/versions/29e42ce4be3f_adding_license_text.py diff --git a/alembic/versions/31703d707dfb_contributor_name_change.py b/dev/alembic.old/versions/31703d707dfb_contributor_name_change.py similarity index 100% rename from alembic/versions/31703d707dfb_contributor_name_change.py rename to dev/alembic.old/versions/31703d707dfb_contributor_name_change.py diff --git a/alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py b/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py similarity index 100% rename from alembic/versions/72ac2b020c7c_delete_dataset_readme_table.py rename to dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py From 869e1aaf900bd1d5a55e14e41f4824539d8ca1c3 Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 27 Feb 2024 01:57:38 +0000 Subject: [PATCH 437/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 47192d36..0e879091 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -302,9 +302,9 @@ def post(self, study_id: str): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[ - str, Any - ] = connect_redcap_project_dashboard_data.to_dict() + connect_redcap_project_dashboard: Dict[str, Any] = ( + connect_redcap_project_dashboard_data.to_dict() + ) return 
connect_redcap_project_dashboard, 201 @@ -425,9 +425,9 @@ def get(self, study_id: str, dashboard_id: str): model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[ - str, Any - ] = redcap_project_dashboard_connector_query.to_dict() + redcap_project_dashboard_connector: Dict[str, Any] = ( + redcap_project_dashboard_connector_query.to_dict() + ) return redcap_project_dashboard_connector, 201 @@ -456,9 +456,9 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Get REDCap Project redcap_id = redcap_project_dashboard["redcap_id"] @@ -474,9 +474,9 @@ def get(self, study_id: str, dashboard_id: str): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( + report["report_id"] + ) # Structure REDCap ETL Config redcap_etl_config = { @@ -625,9 +625,9 @@ def put(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + update_redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Clear Dashboard from Redis Cache caching.cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From c63d5df81706f86a1ae10bb3680c5f555c63fdcc Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 18:02:29 -0800 Subject: [PATCH 438/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20fix=20build?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Dockerfile | 1 + 1 
file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 9292fd57..2ecafd6e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,6 +21,7 @@ COPY model ./model COPY core ./core COPY app.py . COPY config.py . +COPY caching.py . COPY alembic ./alembic COPY alembic.ini . From 050fc6ac6bab2f8b7714afd108809fa8871a3efe Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 18:04:02 -0800 Subject: [PATCH 439/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20fix=20build?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 0e879091..47192d36 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -302,9 +302,9 @@ def post(self, study_id: str): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 @@ -425,9 +425,9 @@ def get(self, study_id: str, dashboard_id: str): model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[str, Any] = ( - redcap_project_dashboard_connector_query.to_dict() - ) + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() return redcap_project_dashboard_connector, 201 @@ -456,9 +456,9 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Get REDCap Project redcap_id = 
redcap_project_dashboard["redcap_id"] @@ -474,9 +474,9 @@ def get(self, study_id: str, dashboard_id: str): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -625,9 +625,9 @@ def put(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Clear Dashboard from Redis Cache caching.cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From 3490822b21c416c821fd38ed3f4e1d7332d198db Mon Sep 17 00:00:00 2001 From: Lint Action Date: Tue, 27 Feb 2024 02:04:27 +0000 Subject: [PATCH 440/505] =?UTF-8?q?style:=20=F0=9F=8E=A8=20fix=20code=20st?= =?UTF-8?q?yle=20issues=20with=20Black?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 47192d36..0e879091 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -302,9 +302,9 @@ def post(self, study_id: str): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[ - str, Any - ] = connect_redcap_project_dashboard_data.to_dict() + connect_redcap_project_dashboard: Dict[str, Any] = ( + connect_redcap_project_dashboard_data.to_dict() + ) return connect_redcap_project_dashboard, 201 @@ -425,9 +425,9 @@ def get(self, study_id: str, dashboard_id: str): model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[ - 
str, Any - ] = redcap_project_dashboard_connector_query.to_dict() + redcap_project_dashboard_connector: Dict[str, Any] = ( + redcap_project_dashboard_connector_query.to_dict() + ) return redcap_project_dashboard_connector, 201 @@ -456,9 +456,9 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Get REDCap Project redcap_id = redcap_project_dashboard["redcap_id"] @@ -474,9 +474,9 @@ def get(self, study_id: str, dashboard_id: str): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"][ - "report_id" - ] = report["report_id"] + redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( + report["report_id"] + ) # Structure REDCap ETL Config redcap_etl_config = { @@ -625,9 +625,9 @@ def put(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() + update_redcap_project_dashboard: Dict[str, Any] = ( + redcap_project_dashboard_query.to_dict() + ) # Clear Dashboard from Redis Cache caching.cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From ab971d5234a8907a77ba04727f90faa0805ab2ca Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 18:07:03 -0800 Subject: [PATCH 441/505] =?UTF-8?q?=F0=9F=9A=A8=20chore:=20remove=20format?= =?UTF-8?q?=20action?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/format.yml | 39 ------------------------------------ apis/dashboard.py | 30 +++++++++++++-------------- 2 files changed, 15 insertions(+), 54 deletions(-) delete mode 
100644 .github/workflows/format.yml diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml deleted file mode 100644 index e7a90983..00000000 --- a/.github/workflows/format.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: Format - -on: - push: - branches: - - "**" - pull_request: - branches: - - "main" - -jobs: - run-formatter: - name: Run formatter - runs-on: ubuntu-latest - - steps: - - name: Check out Git repository - uses: actions/checkout@v3 - - - name: Set up Node.js - uses: actions/setup-node@v3 - with: - node-version: 16 - - - name: Set up Python - uses: actions/setup-python@v3 - with: - python-version: 3.10.4 - - - name: Install Python dependencies - run: pip install black - - - name: Format with Black - uses: wearerequired/lint-action@v2 - with: - black: true - auto_fix: true - commit_message: "style: 🎨 fix code style issues with ${linter}" - github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/apis/dashboard.py b/apis/dashboard.py index 0e879091..47192d36 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -302,9 +302,9 @@ def post(self, study_id: str): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 @@ -425,9 +425,9 @@ def get(self, study_id: str, dashboard_id: str): model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[str, Any] = ( - redcap_project_dashboard_connector_query.to_dict() - ) + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() return redcap_project_dashboard_connector, 201 @@ -456,9 +456,9 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard 
).get(dashboard_id) - redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Get REDCap Project redcap_id = redcap_project_dashboard["redcap_id"] @@ -474,9 +474,9 @@ def get(self, study_id: str, dashboard_id: str): report["report_key"] == report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -625,9 +625,9 @@ def put(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Clear Dashboard from Redis Cache caching.cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") From 3ec71fe6a302985d1660fe1bb524f0a7ae01fcde Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 18:08:02 -0800 Subject: [PATCH 442/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20fix=20build?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-and-deploy-to-main.yml | 7 ------- .github/workflows/build-and-deploy-to-staging.yml | 7 ------- 2 files changed, 14 deletions(-) diff --git a/.github/workflows/build-and-deploy-to-main.yml b/.github/workflows/build-and-deploy-to-main.yml index e41fff74..3bf82164 100644 --- a/.github/workflows/build-and-deploy-to-main.yml +++ b/.github/workflows/build-and-deploy-to-main.yml @@ -27,13 +27,6 @@ jobs: - name: Checkout uses: actions/checkout@v2 - - name: Wait for format check - uses: lewagon/wait-on-check-action@v1.3.1 - with: - ref: main - repo-token: ${{ 
secrets.GITHUB_TOKEN }} - check-name: "Run formatter" - - name: Wait for linting to pass uses: lewagon/wait-on-check-action@v1.3.1 with: diff --git a/.github/workflows/build-and-deploy-to-staging.yml b/.github/workflows/build-and-deploy-to-staging.yml index 5cd20ba8..c372da1f 100644 --- a/.github/workflows/build-and-deploy-to-staging.yml +++ b/.github/workflows/build-and-deploy-to-staging.yml @@ -30,13 +30,6 @@ jobs: - name: Checkout uses: actions/checkout@v2 - - name: Wait for format check - uses: lewagon/wait-on-check-action@v1.3.1 - with: - ref: staging - repo-token: ${{ secrets.GITHUB_TOKEN }} - check-name: "Run formatter" - - name: Wait for linting to pass uses: lewagon/wait-on-check-action@v1.3.1 with: From 137e723933d882ea5a833db440479355a0f19f04 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 26 Feb 2024 18:18:18 -0800 Subject: [PATCH 443/505] =?UTF-8?q?=F0=9F=92=9A=20ci:=20fix=20build?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 2ecafd6e..b25c9554 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,6 +19,7 @@ RUN poetry install COPY apis ./apis COPY model ./model COPY core ./core +COPY modules ./modules COPY app.py . COPY config.py . COPY caching.py . 
From 6e19ce2aace9dd997296bc5cd0f6d61472aadcc6 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Wed, 28 Feb 2024 16:28:33 -0800 Subject: [PATCH 444/505] =?UTF-8?q?=E2=9C=A8feat:=20add=20participant=20li?= =?UTF-8?q?st=20report?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/dashboard.py | 30 +++--- modules/etl/config/aireadi_config.py | 101 ++++++++++++++++----- modules/etl/transforms/module_transform.py | 16 +++- modules/etl/vtypes/timeseries.py | 12 +-- 4 files changed, 109 insertions(+), 50 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index edfd9b92..ff109d18 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -304,9 +304,9 @@ def post(self, study_id: str): ) model.db.session.add(connect_redcap_project_dashboard_data) model.db.session.commit() - connect_redcap_project_dashboard: Dict[str, Any] = ( - connect_redcap_project_dashboard_data.to_dict() - ) + connect_redcap_project_dashboard: Dict[ + str, Any + ] = connect_redcap_project_dashboard_data.to_dict() return connect_redcap_project_dashboard, 201 @@ -427,9 +427,9 @@ def get(self, study_id: str, dashboard_id: str): model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard_connector: Dict[str, Any] = ( - redcap_project_dashboard_connector_query.to_dict() - ) + redcap_project_dashboard_connector: Dict[ + str, Any + ] = redcap_project_dashboard_connector_query.to_dict() return redcap_project_dashboard_connector, 201 @@ -458,9 +458,9 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) - redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Get REDCap Project redcap_id = redcap_project_dashboard["redcap_id"] @@ -476,9 +476,9 @@ def get(self, study_id: str, dashboard_id: str): report["report_key"] == 
report_config["key"] and len(report["report_id"]) > 0 ): - redcapTransformConfig["reports"][i]["kwdargs"]["report_id"] = ( - report["report_id"] - ) + redcapTransformConfig["reports"][i]["kwdargs"][ + "report_id" + ] = report["report_id"] # Structure REDCap ETL Config redcap_etl_config = { @@ -627,9 +627,9 @@ def put(self, study_id: str, dashboard_id: str): redcap_project_dashboard_query.update(data) model.db.session.commit() - update_redcap_project_dashboard: Dict[str, Any] = ( - redcap_project_dashboard_query.to_dict() - ) + update_redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() # Clear Dashboard from Redis Cache cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index c4678e13..a23e0a92 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -57,7 +57,7 @@ "treatments", "scrweek", "scryear", - "scrweekyear", + "scrdate", ] # Survey Column Groups @@ -139,8 +139,8 @@ # redcap_report_merge_map: Dict[str, Dict[str, Any]] = { - "participants-list": {"on": index_columns, "how": "inner"}, - "participant-value": {"on": index_columns, "how": "inner"}, + "participant-list": {"on": index_columns, "how": "inner"}, + "participant-values": {"on": index_columns, "how": "inner"}, "instrument-status": {"on": index_columns, "how": "inner"}, "repeat-instrument": {"on": index_columns, "how": "outer"}, } @@ -154,7 +154,18 @@ redcapTransformConfig: Dict[str, Any] = { "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] { - "key": "participant-value", + "key": "participant-list", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "", + }, + "transforms": [], + }, + { + "key": "participant-values", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ 
-171,9 +182,7 @@ "column": "scrcmpdat", "new_column_name": "scrweek", # ISO 8601 string format token for front-end: %V - "transform": lambda x: int( - datetime.strptime(x, "%Y-%m-%d").isocalendar().week - ), + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, "missing_value": missing_value_generic, }, ), @@ -183,9 +192,7 @@ "column": "scrcmpdat", "new_column_name": "scryear", # ISO 8601 string format token for front-end: %Y - "transform": lambda x: int( - datetime.strptime(x, "%Y-%m-%d").isocalendar().year - ), + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, "missing_value": missing_value_generic, }, ), @@ -193,12 +200,9 @@ "transform_values_by_column", { "column": "scrcmpdat", - "new_column_name": "scrweekyear", + "new_column_name": "scrdate", # ISO 8601 string format token for front-end: %Y - "transform": lambda x: ( - int(datetime.strptime(x, "%Y-%m-%d").isocalendar().week), - int(datetime.strptime(x, "%Y-%m-%d").isocalendar().year), - ), + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), "missing_value": missing_value_generic, }, ), @@ -269,7 +273,8 @@ "post_transform_merge": ( index_columns, [ - ("participant-value", {"on": index_columns, "how": "inner"}), + ("participant-list", {"on": index_columns, "how": "inner"}), + ("participant-values", {"on": index_columns, "how": "inner"}), ("instrument-status", {"on": index_columns, "how": "inner"}), ("repeat-instrument", {"on": index_columns, "how": "outer"}), ], @@ -1648,10 +1653,10 @@ "transforms": [ { "name": "Race Recruitment by Site", - "vtype": "DoubleDiscrete", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "race", "scrweek"], + "groups": ["siteid", "race", "scrdate"], "value": "record_id", "func": "count", } @@ -1671,9 +1676,9 @@ }, "x": { "name": "Week of the Year", - "field": "scrweek", + "field": "scrdate", "missing_value": missing_value_generic, - "astype": int, + "astype": str, }, "y": { "name": "Cumulative 
Count (N)", @@ -1696,10 +1701,10 @@ "transforms": [ { "name": "Phenotype Recruitment by Site", - "vtype": "DoubleDiscrete", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "phenotypes", "scrweek"], + "groups": ["siteid", "phenotypes", "scrdate"], "value": "record_id", "func": "count", } @@ -1719,9 +1724,9 @@ }, "x": { "name": "Week of the Year", - "field": "scrweek", + "field": "scrdate", "missing_value": missing_value_generic, - "astype": int, + "astype": str, }, "y": { "name": "Cumulative Count (N)", @@ -1829,6 +1834,53 @@ }, ) +# Phenotype & Site Counts by Sex +phenotypeSiteBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-site-by-sex", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Site by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "phenotypes", "siteid"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + # Phenotype & Race Counts by Sex phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", @@ -1926,6 +1978,7 @@ moduleTransformConfigs: Dict[str, Any] = { "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, + "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, 
"race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index 3725b8fe..b568aa4b 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -170,7 +170,9 @@ def simpleTransform(self, df: pd.DataFrame) -> object: groups. An aggregate function is then applied to the non-grouped column (e.g. count, sum). - One transform for one VType. + One transform for one VType. A single + visualization is then rendered to a single + visualization module. """ self.transformed = [] transform: Dict[str, Any] = ( @@ -220,7 +222,9 @@ def compoundTransform(self, df: pd.DataFrame) -> object: All transforms are combined into a single flat transform. Transforms must be identical VType, - e.g. [transformA, transformB, ...] + (e.g. [transformA, transformB, ...]). A single + (aggregated) visualization is then rendered to + a single visualization module. """ self.transformed = [] @@ -271,9 +275,11 @@ def mixedTransform(self, df: pd.DataFrame) -> object: groups. An aggregate function is then applied to the non-grouped column (e.g. count, sum). - Transforms are kept distinct inserted into a dictionary, - e.g. {nameA: transformA, nameB: transformB, ...}. - Transforms can be heterogenous VTypes. + Transforms are kept distinct and inserted into a + dictionary, e.g. {nameA: transformA, nameB: transformB, + ...}. Transforms can be heterogenous VTypes. + Multiple visualizations are then rendered in the same + visualization module. 
""" self.transformed = {} for transform in self.transforms: diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index a3eafddb..e7dba483 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -9,8 +9,8 @@ def __init__(self) -> None: "SingleTimeseries", [ ("filterby", str), - ("subgroup", str), - ("datetime", datetime), + ("group", str), + ("x", datetime), ], pd._libs.tslibs.nattype.NaTType, ) @@ -22,8 +22,8 @@ def __init__(self) -> None: "DoubleDiscreteTimeseries", [ ("filterby", str), - ("subgroup", str), - ("datetime", str), + ("group", str), + ("x", str), ("y", int), ], pd._libs.tslibs.nattype.NaTType, @@ -36,8 +36,8 @@ def __init__(self) -> None: "DoubleContinuousTimeseries", [ ("filterby", str), - ("subgroup", str), - ("datetime", str), + ("group", str), + ("x", str), ("y", float), ], pd._libs.tslibs.nattype.NaTType, From f8024f999f0b07c409e6ab4ed613fed595b4a389 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Fri, 1 Mar 2024 13:49:26 -0800 Subject: [PATCH 445/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20=20=20dataset=20r?= =?UTF-8?q?elated-identifier=20and=20publisher=20tables=20(#49)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: delete record keys * fix: add publisher table * fix: related item identifier model and table * fix: related item identifier endpoint * fix: dataset publisher * fix: test for related item identifier table * fix: modify dataset other table * fix: modify dataset_managing_organization * fix: test dataset other and managing org * fix:dataset other * fix: format * wip: testing version minimized steps * fix: testing version related identifier * fix: version related identifier --- apis/__init__.py | 8 +- ...ys.py => dataset_managing_organization.py} | 49 +- apis/dataset_metadata/dataset_other.py | 87 +- .../dataset_related_identifier.py | 128 ++ apis/dataset_metadata/dataset_related_item.py 
| 449 ------ model/__init__.py | 18 +- model/dataset.py | 19 +- model/dataset_metadata/dataset_other.py | 13 +- model/dataset_metadata/dataset_record_keys.py | 40 - .../dataset_related_identifier.py | 67 + .../dataset_metadata/dataset_related_item.py | 160 --- .../dataset_related_item_contributor.py | 58 - .../dataset_related_item_identifier.py | 61 - .../dataset_related_item_other.py | 68 - .../dataset_related_item_title.py | 50 - sql/init.sql | 20 - sql/init_timezones.sql | 24 - tests/conftest.py | 23 +- .../test_study_dataset_metadata_api.py | 1223 +++-------------- tests/functional/test_study_version_api.py | 238 +--- 20 files changed, 502 insertions(+), 2301 deletions(-) rename apis/dataset_metadata/{dataset_record_keys.py => dataset_managing_organization.py} (55%) create mode 100644 apis/dataset_metadata/dataset_related_identifier.py delete mode 100644 apis/dataset_metadata/dataset_related_item.py delete mode 100644 model/dataset_metadata/dataset_record_keys.py create mode 100644 model/dataset_metadata/dataset_related_identifier.py delete mode 100644 model/dataset_metadata/dataset_related_item.py delete mode 100644 model/dataset_metadata/dataset_related_item_contributor.py delete mode 100644 model/dataset_metadata/dataset_related_item_identifier.py delete mode 100644 model/dataset_metadata/dataset_related_item_other.py delete mode 100644 model/dataset_metadata/dataset_related_item_title.py diff --git a/apis/__init__.py b/apis/__init__.py index 2dba1c7d..652959de 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -19,8 +19,8 @@ from .dataset_metadata.dataset_funder import api as funder from .dataset_metadata.dataset_healthsheet import api as healthsheet from .dataset_metadata.dataset_other import api as dataset_other -from .dataset_metadata.dataset_record_keys import api as record_keys -from .dataset_metadata.dataset_related_item import api as related_item +from .dataset_metadata.dataset_managing_organization import api as managing_organization +from 
.dataset_metadata.dataset_related_identifier import api as related_identifier from .dataset_metadata.dataset_rights import api as rights from .dataset_metadata.dataset_subject import api as subject from .dataset_metadata.dataset_title import api as title @@ -54,6 +54,7 @@ ) __all__ = [ + "managing_organization", "dataset_metadata_namespace", "study_metadata_namespace", "authentication", @@ -68,8 +69,7 @@ "description", "funder", "dataset_other", - "record_keys", - "related_item", + "related_identifier", "api", "rights", "subject", diff --git a/apis/dataset_metadata/dataset_record_keys.py b/apis/dataset_metadata/dataset_managing_organization.py similarity index 55% rename from apis/dataset_metadata/dataset_record_keys.py rename to apis/dataset_metadata/dataset_managing_organization.py index c4146022..c04ce441 100644 --- a/apis/dataset_metadata/dataset_record_keys.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -1,4 +1,4 @@ -"""API endpoints for dataset record keys""" +"""API endpoints for other dataset metadata""" from flask import request from flask_restx import Resource, fields @@ -8,36 +8,36 @@ from apis.authentication import is_granted from apis.dataset_metadata_namespace import api -dataset_record_keys = api.model( - "DatasetRecordKeys", + +dataset_managing_organization = api.model( + "DatasetManagingOrganization", { - "id": fields.String(required=True), - "key_type": fields.String(required=False), - "key_details": fields.String(required=True), + "managing_organization_name": fields.String(required=True), + "managing_organization_ror_id": fields.String(required=True), }, ) -@api.route("/study//dataset//metadata/record-keys") -class DatasetRecordKeysResource(Resource): - """Dataset Record Keys Resource""" +@api.route("/study//dataset//metadata/managing-organization") +class DatasetManagingOrganization(Resource): + """Dataset Publisher Resource""" - @api.doc("record keys") + @api.doc("publisher") @api.response(200, "Success") @api.response(400, 
"Validation Error") - # @api.marshal_with(dataset_record_keys) + @api.marshal_with(dataset_managing_organization) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset record keys""" + """Get dataset publisher metadata""" dataset_ = model.Dataset.query.get(dataset_id) + managing_organization_ = dataset_.dataset_other + return managing_organization_.to_dict(), 200 - dataset_record_keys_ = dataset_.dataset_record_keys - return dataset_record_keys_.to_dict(), 200 - - @api.doc("update record keys") + @api.doc("update organization") @api.response(200, "Success") @api.response(400, "Validation Error") + @api.marshal_with(dataset_managing_organization) def put(self, study_id: int, dataset_id: int): - """Update dataset record keys""" + """Update dataset managing organization metadata""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): @@ -47,17 +47,17 @@ def put(self, study_id: int, dataset_id: int): "type": "object", "additionalProperties": False, "properties": { - "type": {"type": "string", "minLength": 1}, - "details": { + "managing_organization_name": {"type": "string", "minLength": 1}, + "managing_organization_ror_id": { "type": "string", }, + }, "required": [ - "type", - "details", + "managing_organization_name", + "managing_organization_ror_id" ], } - try: validate(instance=request.json, schema=schema) except ValidationError as err: @@ -65,6 +65,7 @@ def put(self, study_id: int, dataset_id: int): data = request.json dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_record_keys.update(data) + dataset_.dataset_other.update(data) + model.db.session.commit() - return dataset_.dataset_record_keys.to_dict(), 200 + return dataset_.dataset_other.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index e00f9b86..04246f97 100644 --- a/apis/dataset_metadata/dataset_other.py +++ 
b/apis/dataset_metadata/dataset_other.py @@ -1,25 +1,24 @@ """API endpoints for other dataset metadata""" from flask import request -from flask_restx import Resource, fields +from flask_restx import Resource from jsonschema import ValidationError, validate import model from apis.authentication import is_granted from apis.dataset_metadata_namespace import api -dataset_other = api.model( - "DatasetOther", - { - "language": fields.String(required=True), - "managing_organization_name": fields.String(required=True), - "managing_organization_ror_id": fields.String(required=True), - "size": fields.List(fields.String, required=True), - "standards_followed": fields.String(required=True), - "acknowledgement": fields.String(required=True), - "resource_type": fields.String(required=True), - }, -) +# dataset_other = api.model( +# "DatasetOther", +# { +# "language": fields.String(required=True), +# "size": fields.List(fields.String, required=True), +# "format": fields.List(fields.String, required=True), +# "standards_followed": fields.String(required=True), +# "acknowledgement": fields.String(required=True), +# "resource_type": fields.String(required=True), +# }, +# ) @api.route("/study//dataset//metadata/other") @@ -29,7 +28,7 @@ class DatasetOtherResource(Resource): @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_other) + # @api.marshal_with(dataset_other) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument """Get dataset other metadata""" dataset_ = model.Dataset.query.get(dataset_id) @@ -39,7 +38,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.doc("other update") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset_other) + # @api.marshal_with(dataset_other) def put(self, study_id: int, dataset_id: int): """Update dataset other metadata""" study_obj = model.Study.query.get(study_id) @@ -59,6 
+58,11 @@ def put(self, study_id: int, dataset_id: int): "items": {"type": "string"}, "uniqueItems": True, }, + "format": { + "type": "array", + "items": {"type": "string"}, + "uniqueItems": True, + }, "standards_followed": {"type": "string"}, }, "required": [ @@ -80,56 +84,3 @@ def put(self, study_id: int, dataset_id: int): dataset_.dataset_other.update(data) model.db.session.commit() return dataset_.dataset_other.to_dict(), 200 - - -@api.route("/study//dataset//metadata/publisher") -class DatasetPublisherResource(Resource): - """Dataset Publisher Resource""" - - @api.doc("publisher") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_publisher) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset publisher metadata""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_other_ = dataset_.dataset_other - return dataset_other_.to_dict(), 200 - - @api.doc("update publisher") - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int, dataset_id: int): - """Update dataset publisher metadata""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "publisher": {"type": "string", "minLength": 1}, - "managing_organization_name": {"type": "string", "minLength": 1}, - "managing_organization_ror_id": { - "type": "string", - }, - }, - "required": [ - "publisher", - "managing_organization_name", - "managing_organization_ror_id", - ], - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_other.update(data) - model.db.session.commit() - return 
dataset_.dataset_other.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_related_identifier.py b/apis/dataset_metadata/dataset_related_identifier.py new file mode 100644 index 00000000..b4b76888 --- /dev/null +++ b/apis/dataset_metadata/dataset_related_identifier.py @@ -0,0 +1,128 @@ +"""API for dataset related identifier""" + +from typing import Any, Union + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_related_identifier = api.model( + "DatasetRelatedIdentifier", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=False), + "relation_type": fields.String(required=False), + "related_metadata_scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "scheme_type": fields.String(required=True), + "resource_type": fields.String(required=False), + }, +) + + +@api.route("/study//dataset//metadata/related-identifier") +class DatasetRelatedIdentifierResource(Resource): + """Dataset related identifier Resource""" + + @api.doc("related identifier") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(dataset_related_identifier) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset related identifier""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_related_identifier_ = dataset_.dataset_related_identifier + return [d.to_dict() for d in dataset_related_identifier_], 200 + + @api.doc("update related identifier") + @api.response(201, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + """Update dataset related identifier""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", 
study_obj): + return ( + "Access denied, you can not" + " make any change in dataset metadata" # noqa: E402 + ), 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": {"type": "string", "minLength": 1}, + "identifier_type": {"type": ["string", "null"], "minLength": 1}, + "relation_type": {"type": ["string", "null"], "minLength": 1}, + "related_metadata_scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "scheme_type": {"type": "string"}, + "resource_type": {"type": ["string", "null"]}, + }, + "required": [ + "identifier", + "identifier_type", + "relation_type", + "related_metadata_scheme", + "scheme_uri", + "scheme_type", + ], + }, + "uniqueItems": True, + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get(i["id"]) + if not dataset_related_identifier_: + return f"{i['id']} Id is not found", 404 + dataset_related_identifier_.update(i) + list_of_elements.append(dataset_related_identifier_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_related_identifier_ = model.DatasetRelatedIdentifier.from_data(data_obj, i) + model.db.session.add(dataset_related_identifier_) + list_of_elements.append(dataset_related_identifier_.to_dict()) + model.db.session.commit() + return list_of_elements, 201 + + +@api.route( + "/study//dataset//metadata/related-identifier/" +) +class DatasetRelatedIdentifierUpdate(Resource): + """Dataset related identifier Update Resource""" + + @api.doc("delete related identifier") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= 
unused-argument + related_identifier_id: int, + ): + """Delete dataset related identifier""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get(related_identifier_id) + + model.db.session.delete(dataset_related_identifier_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_related_item.py b/apis/dataset_metadata/dataset_related_item.py deleted file mode 100644 index 0a409b6a..00000000 --- a/apis/dataset_metadata/dataset_related_item.py +++ /dev/null @@ -1,449 +0,0 @@ -"""API for dataset related item""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_related_item = api.model( - "DatasetRelatedItem", - { - "id": fields.String(required=True), - "type": fields.String(required=True), - "relation_type": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/related-item") -class DatasetRelatedItemResource(Resource): - """Dataset Related Item Resource""" - - @api.doc("related item") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_related_item) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset related item""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_related_item_ = dataset_.dataset_related_item - return [d.to_dict() for d in dataset_related_item_], 200 - - @api.doc("update related item") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset related item""" - 
study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return ( - "Access denied, you can not" - " make any change in dataset metadata" # noqa: E402 - ), 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "contributors": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "contributor_type": {"type": "string", "minLength": 1}, - "name_type": { - "type": "string", - "enum": ["Personal", "Organizational"], - }, - }, - "required": ["contributor_type", "name_type", "name"], - }, - "uniqueItems": True, - }, - "creators": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "name_type": { - "type": "string", - "enum": ["Personal", "Organizational"], - }, - }, - "required": ["name", "name_type"], - }, - "uniqueItems": True, - }, - "edition": {"type": "string"}, - "first_page": {"type": "string"}, - "identifiers": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "identifier": {"type": "string", "minLength": 1}, - "metadata_scheme": {"type": "string"}, - "scheme_type": {"type": "string"}, - "scheme_uri": {"type": "string"}, - "type": { - "type": "string", - "enum": [ - "ARK", - "arXiv", - "bibcode", - "DOI", - "EAN13", - "EISSN", - "Handle", - "IGSN", - "ISBN", - "ISSN", - "ISTC", - "LISSN", - "LSID", - "PMID", - "PURL", - "UPC", - "URL", - "URN", - "w3id", - "Other", - ], - }, - }, - "required": [ - "identifier", - "metadata_scheme", - "scheme_type", - "scheme_uri", - "type", - ], - }, - "uniqueItems": True, - }, - "issue": {"type": "string"}, - "last_page": {"type": "string"}, - 
"number_type": {"type": "string"}, - "number_value": {"type": "string"}, - "publication_year": {"type": ["integer", "null"]}, - "publisher": {"type": "string"}, - "relation_type": {"type": "string", "minLength": 1}, - "titles": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "title": {"type": "string", "minLength": 1}, - "type": { - "type": "string", - "enum": [ - "MainTitle", - "AlternativeTitle", - "Subtitle", - "TranslatedTitle", - "OtherTitle", - ], - }, - }, - "required": ["title", "type"], - }, - "minItems": 1, - "uniqueItems": True, - }, - "type": {"type": "string", "minLength": 1}, - "volume": {"type": "string"}, - }, - "required": [ - "contributors", - "creators", - "edition", - "first_page", - "identifiers", - "issue", - "last_page", - "number_type", - "number_value", - "publication_year", - "publisher", - "relation_type", - "titles", - "type", - "volume", - ], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - for i in data: - if "id" in i and i["id"]: - dataset_related_item_ = model.DatasetRelatedItem.query.get(i["id"]) - if not dataset_related_item_: - return f"{i['id']} Id is not found", 404 - dataset_related_item_.update(i) - # dataset_related_item_.dataset_related_item_other.update(i) - - for title in i["titles"]: - if "id" in title and title["id"]: - update_title = model.DatasetRelatedItemTitle.query.get( - title["id"] - ) - update_title.update(title) - else: - title_add = model.DatasetRelatedItemTitle.from_data( - dataset_related_item_, title - ) - model.db.session.add(title_add) - - for identifier in i["identifiers"]: - if "id" in identifier and identifier["id"]: - update_identifier = ( - model.DatasetRelatedItemIdentifier.query.get( - identifier["id"] - ) - ) - 
update_identifier.update(identifier) - else: - identifier_add = model.DatasetRelatedItemIdentifier.from_data( - dataset_related_item_, identifier - ) - model.db.session.add(identifier_add) - contributors_ = i["contributors"] - creators_ = i["creators"] - for c in contributors_: - if "id" in c and c["id"]: - related_item_contributors_ = ( - model.DatasetRelatedItemContributor.query.get(c["id"]) - ) - related_item_contributors_.update(c) - model.db.session.add(related_item_contributors_) - else: - related_item_contributors_ = ( - model.DatasetRelatedItemContributor.from_data( - dataset_related_item_, c, False - ) - ) - model.db.session.add(related_item_contributors_) - - for c in creators_: - if "id" in c and c["id"]: - related_item_creators_ = ( - model.DatasetRelatedItemContributor.query.get(c["id"]) - ) - - related_item_creators_.update(c) - else: - related_item_creators_ = ( - model.DatasetRelatedItemContributor.from_data( - dataset_related_item_, c, True - ) - ) - model.db.session.add(related_item_creators_) - - # list_of_elements.append(dataset_related_item_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_related_item_ = model.DatasetRelatedItem.from_data(data_obj, i) - model.db.session.add(dataset_related_item_) - - for t in i["titles"]: - title_add = model.DatasetRelatedItemTitle.from_data( - dataset_related_item_, t - ) - model.db.session.add(title_add) - - for identifier in i["identifiers"]: - identifier_add = model.DatasetRelatedItemIdentifier.from_data( - dataset_related_item_, identifier - ) - model.db.session.add(identifier_add) - - contributors_ = i["contributors"] - creators_ = i["creators"] - for c in contributors_: - related_item_contributors_ = ( - model.DatasetRelatedItemContributor.from_data( - dataset_related_item_, c, False - ) - ) - model.db.session.add(related_item_contributors_) - - for c in creators_: - related_item_creators_ = ( - model.DatasetRelatedItemContributor.from_data( - dataset_related_item_, c, True - ) - ) - 
model.db.session.add(related_item_creators_) - - model.db.session.commit() - return [item.to_dict() for item in data_obj.dataset_related_item], 201 - - -@api.route( - "/study//dataset//metadata/related-item/" -) -class DatasetRelatedItemUpdate(Resource): - """Dataset Related Item Update Resource""" - - @api.doc("delete related item") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - related_item_id: int, - ): - """Delete dataset related item""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_related_item_ = model.DatasetRelatedItem.query.get(related_item_id) - - model.db.session.delete(dataset_related_item_) - model.db.session.commit() - - return Response(status=204) - - -@api.route( - "/study//dataset//metadata/related-item/" - "/contributor/" -) -class RelatedItemContributorsDelete(Resource): - """Dataset Related Item Contributors Delete Resource""" - - @api.doc("delete related item contributors") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - related_item_id: int, # pylint: disable= unused-argument - contributor_id: int, - ): - """Delete dataset related item contributors""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_contributors_ = model.DatasetRelatedItemContributor.query.get( - contributor_id - ) - model.db.session.delete(dataset_contributors_) - model.db.session.commit() - - return Response(status=204) - - -@api.route( - "/study//dataset//metadata/" - "related-item//title/" -) -class RelatedItemTitlesDelete(Resource): - """Dataset Related Item 
Titles Delete Resource""" - - @api.doc("delete related item title") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - related_item_id: int, # pylint: disable= unused-argument - title_id: int, - ): - """Delete dataset related item title""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_title_ = model.DatasetRelatedItemTitle.query.get(title_id) - if dataset_title_.type == "MainTitle": - return ( - "Main Title type can not be deleted", - 403, - ) - model.db.session.delete(dataset_title_) - model.db.session.commit() - return Response(status=204) - - -@api.route( - "/study//dataset//metadata/" - "related-item//identifier/" -) -class RelatedItemIdentifiersDelete(Resource): - """Dataset Related Item Identifiers Delete Resource""" - - @api.doc("delete related item identifier") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - related_item_id: int, # pylint: disable= unused-argument - identifier_id: int, - ): - """Delete dataset related item identifier""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_identifier_ = model.DatasetRelatedItemIdentifier.query.get( - identifier_id - ) - model.db.session.delete(dataset_identifier_) - model.db.session.commit() - return Response(status=204) - - -@api.route( - "/study//dataset//metadata/related-item/" - "/creator/" # pylint: disable = line-too-long -) -class RelatedItemCreatorDelete(Resource): - """Dataset Related Item Creator Delete Resource""" - - @api.doc("delete related item creator") - @api.response(204, "Success") - 
@api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - related_item_id: int, # pylint: disable= unused-argument - creator_id: int, - ): - """Delete dataset related item creator""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_creator_ = model.DatasetRelatedItemContributor.query.get(creator_id) - model.db.session.delete(dataset_creator_) - model.db.session.commit() - return Response(status=204) diff --git a/model/__init__.py b/model/__init__.py index 1adc44d8..67fbe758 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,13 +1,5 @@ from model.dataset_metadata.dataset_contributor import DatasetContributor -from model.dataset_metadata.dataset_related_item import DatasetRelatedItem -from model.dataset_metadata.dataset_related_item_contributor import ( - DatasetRelatedItemContributor, -) -from model.dataset_metadata.dataset_related_item_identifier import ( - DatasetRelatedItemIdentifier, -) -from model.dataset_metadata.dataset_related_item_other import DatasetRelatedItemOther -from model.dataset_metadata.dataset_related_item_title import DatasetRelatedItemTitle +from model.dataset_metadata.dataset_related_identifier import DatasetRelatedIdentifier from .dataset import Dataset from .dataset_metadata.dataset_access import DatasetAccess @@ -19,7 +11,6 @@ from .dataset_metadata.dataset_funder import DatasetFunder from .dataset_metadata.dataset_healthsheet import DatasetHealthsheet from .dataset_metadata.dataset_other import DatasetOther -from .dataset_metadata.dataset_record_keys import DatasetRecordKeys from .dataset_metadata.dataset_rights import DatasetRights from .dataset_metadata.dataset_subject import DatasetSubject from .dataset_metadata.dataset_title import DatasetTitle @@ -76,14 +67,9 @@ "DatasetFunder", "DatasetAlternateIdentifier", 
"DatasetRights", - "DatasetRecordKeys", "DatasetTitle", "DatasetSubject", - "DatasetRelatedItemContributor", - "DatasetRelatedItemIdentifier", - "DatasetRelatedItemOther", - "DatasetRelatedItemTitle", - "DatasetRelatedItem", + "DatasetRelatedIdentifier", "DatasetDescription", "StudyArm", "StudyAvailableIpd", diff --git a/model/dataset.py b/model/dataset.py index 2770881f..dfb5c0b9 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -17,7 +17,6 @@ def __init__(self, study): self.created_at = datetime.datetime.now(timezone.utc).timestamp() self.dataset_access = model.DatasetAccess(self) - self.dataset_record_keys = model.DatasetRecordKeys(self) self.dataset_de_ident_level = model.DatasetDeIdentLevel(self) self.dataset_consent = model.DatasetConsent(self) self.dataset_healthsheet = model.DatasetHealthsheet(self) @@ -95,14 +94,8 @@ def __init__(self, study): dataset_other = db.relationship( "DatasetOther", back_populates="dataset", uselist=False, cascade="all, delete" ) - dataset_record_keys = db.relationship( - "DatasetRecordKeys", - back_populates="dataset", - uselist=False, - cascade="all, delete", - ) - dataset_related_item = db.relationship( - "DatasetRelatedItem", back_populates="dataset", cascade="all, delete" + dataset_related_identifier = db.relationship( + "DatasetRelatedIdentifier", back_populates="dataset", cascade="all, delete" ) dataset_rights = db.relationship( "DatasetRights", back_populates="dataset", cascade="all, delete" @@ -138,7 +131,7 @@ def to_dict_dataset_metadata(self): if not i.creator ], "about": self.dataset_other.to_dict_metadata(), - "publisher": self.dataset_other.to_dict_publisher(), # type: ignore + "managing_organization": self.dataset_other.to_dict_managing_organization(), # type: ignore "access": self.dataset_access.to_dict_metadata(), "consent": self.dataset_consent.to_dict_metadata(), "dates": [i.to_dict_metadata() for i in self.dataset_date], # type: ignore @@ -158,9 +151,9 @@ def to_dict_dataset_metadata(self): for i in 
self.dataset_contributors # type: ignore if i.creator ], - "record_keys": self.dataset_record_keys.to_dict_metadata(), - "related_items": [ - i.to_dict_metadata() for i in self.dataset_related_item # type: ignore + "related_identifier": [ + i.to_dict_metadata() + for i in self.dataset_related_identifier # type: ignore ], "rights": [ i.to_dict_metadata() for i in self.dataset_rights # type: ignore diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index b1846743..411af35e 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -12,9 +12,9 @@ def __init__(self, dataset): self.managing_organization_name = "" self.managing_organization_ror_id = "" self.size = "" + self.format = "" self.standards_followed = "" self.acknowledgement = "" - self.publisher = "" __tablename__ = "dataset_other" @@ -23,9 +23,9 @@ def __init__(self, dataset): managing_organization_name = db.Column(db.String, nullable=False) managing_organization_ror_id = db.Column(db.String, nullable=False) size = db.Column(ARRAY(String), nullable=False) + format = db.Column(ARRAY(String), nullable=False) standards_followed = db.Column(db.String, nullable=False) acknowledgement = db.Column(db.String, nullable=False) - publisher = db.Column(db.String, nullable=False) dataset_id = db.Column( db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False @@ -40,7 +40,7 @@ def to_dict(self): "standards_followed": self.standards_followed, "acknowledgement": self.acknowledgement, "size": self.size, - "publisher": self.publisher, + "format": self.format, "resource_type": self.resource_type, } @@ -51,10 +51,9 @@ def to_dict_metadata(self): "resource_type": self.resource_type, } - def to_dict_publisher(self): + def to_dict_managing_organization(self): return { "managing_organization_name": self.managing_organization_name, - "publisher": self.publisher, } @staticmethod @@ -72,12 +71,12 @@ def update(self, data: dict): 
self.managing_organization_ror_id = data["managing_organization_ror_id"] if "size" in data: self.size = data["size"] + if "format" in data: + self.format = data["format"] if "acknowledgement" in data: self.acknowledgement = data["acknowledgement"] if "standards_followed" in data: self.standards_followed = data["standards_followed"] - if "publisher" in data: - self.publisher = data["publisher"] if "resource_type" in data: self.resource_type = data["resource_type"] self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_record_keys.py b/model/dataset_metadata/dataset_record_keys.py deleted file mode 100644 index 9f2d9b94..00000000 --- a/model/dataset_metadata/dataset_record_keys.py +++ /dev/null @@ -1,40 +0,0 @@ -from ..db import db - - -class DatasetRecordKeys(db.Model): # type: ignore - def __init__(self, dataset): - self.dataset = dataset - self.key_type = None - self.key_details = "" - - __tablename__ = "dataset_record_keys" - key_type = db.Column(db.String, nullable=True) - key_details = db.Column(db.String, nullable=False) - - dataset_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False - ) - dataset = db.relationship("Dataset", back_populates="dataset_record_keys") - - def to_dict(self): - return { - "type": self.key_type, - "details": self.key_details, - } - - def to_dict_metadata(self): - return { - "key_type": self.key_type, - "key_details": self.key_details, - } - - @staticmethod - def from_data(dataset, data: dict): - dataset_record_keys = DatasetRecordKeys(dataset) - dataset_record_keys.update(data) - return dataset_record_keys - - def update(self, data: dict): - self.key_type = data["type"] - self.key_details = data["details"] - self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_related_identifier.py b/model/dataset_metadata/dataset_related_identifier.py new file mode 100644 index 00000000..fd1c738d --- /dev/null +++ b/model/dataset_metadata/dataset_related_identifier.py @@ 
-0,0 +1,67 @@ +import datetime +import uuid +from datetime import timezone + +from ..db import db + + +class DatasetRelatedIdentifier(db.Model): # type: ignore + def __init__(self, dataset): + self.id = str(uuid.uuid4()) + self.dataset = dataset + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + + __tablename__ = "dataset_related_identifier" + + id = db.Column(db.CHAR(36), primary_key=True) + + identifier = db.Column(db.String, nullable=False) + identifier_type = db.Column(db.String, nullable=True) + relation_type = db.Column(db.String, nullable=True) + related_metadata_scheme = db.Column(db.String, nullable=False) + scheme_uri = db.Column(db.String, nullable=False) + scheme_type = db.Column(db.String, nullable=False) + resource_type = db.Column(db.String, nullable=True) + + created_at = db.Column(db.BigInteger, nullable=False) + + dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) + dataset = db.relationship("Dataset", back_populates="dataset_related_identifier") + + def to_dict(self): + return { + "id": self.id, + "identifier": self.identifier, + "identifier_type": self.identifier_type, + "relation_type": self.relation_type, + "related_metadata_scheme": self.related_metadata_scheme, + "scheme_uri": self.scheme_uri, + "scheme_type": self.scheme_type, + "resource_type": self.resource_type, + "created_at": self.created_at, + } + + def to_dict_metadata(self): + return { + "id": self.id, + "identifier": self.identifier, + "relation_type": self.relation_type, + "resource_type": self.resource_type, + } + + @staticmethod + def from_data(dataset, data: dict): + dataset_related_identifier = DatasetRelatedIdentifier(dataset) + dataset_related_identifier.update(data) + return dataset_related_identifier + + def update(self, data: dict): + self.identifier = data["identifier"] + self.identifier_type = data["identifier_type"] + self.relation_type = data["relation_type"] + self.related_metadata_scheme = 
data["related_metadata_scheme"] + self.scheme_uri = data["scheme_uri"] + self.scheme_type = data["scheme_type"] + self.resource_type = data["resource_type"] + + self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_related_item.py b/model/dataset_metadata/dataset_related_item.py deleted file mode 100644 index 13b7b344..00000000 --- a/model/dataset_metadata/dataset_related_item.py +++ /dev/null @@ -1,160 +0,0 @@ -import datetime -import uuid -from datetime import timezone - -import model - -from ..db import db - - -class DatasetRelatedItem(db.Model): # type: ignore - def __init__(self, dataset): - self.id = str(uuid.uuid4()) - self.dataset = dataset - self.created_at = datetime.datetime.now(timezone.utc).timestamp() - self.dataset_related_item_other = model.DatasetRelatedItemOther(self) - - __tablename__ = "dataset_related_item" - - id = db.Column(db.CHAR(36), primary_key=True) - type = db.Column(db.String, nullable=True) - relation_type = db.Column(db.String, nullable=True) - created_at = db.Column(db.BigInteger, nullable=False) - - dataset_id = db.Column(db.CHAR(36), db.ForeignKey("dataset.id"), nullable=False) - dataset = db.relationship("Dataset", back_populates="dataset_related_item") - - dataset_related_item_contributor = db.relationship( - "DatasetRelatedItemContributor", - back_populates="dataset_related_item", - cascade="all, delete", - ) - dataset_related_item_identifier = db.relationship( - "DatasetRelatedItemIdentifier", - back_populates="dataset_related_item", - cascade="all, delete", - ) - dataset_related_item_other = db.relationship( - "DatasetRelatedItemOther", - back_populates="dataset_related_item", - uselist=False, - cascade="all, delete", - ) - dataset_related_item_title = db.relationship( - "DatasetRelatedItemTitle", - back_populates="dataset_related_item", - cascade="all, delete", - ) - - def to_dict(self): - sorted_contributors = sorted( - self.dataset_related_item_contributor, - key=lambda creator: creator.created_at, - ) - 
creators = [c for c in sorted_contributors if c.creator] - contributors = [c for c in sorted_contributors if not c.creator] - return { - "id": self.id, - "type": self.type, - "relation_type": self.relation_type, - "created_at": self.created_at, - "titles": [ - i.to_dict() for i in self.dataset_related_item_title # type: ignore - ], - "creators": [c.to_dict() for c in creators], - "contributors": [c.to_dict() for c in contributors], - "publication_year": ( - self.dataset_related_item_other.publication_year - if self.dataset_related_item_other - else None - ), - "volume": ( - self.dataset_related_item_other.volume - if self.dataset_related_item_other - else None - ), - "issue": ( - self.dataset_related_item_other.issue - if self.dataset_related_item_other - else None - ), - "number_value": ( - self.dataset_related_item_other.number_value - if self.dataset_related_item_other - else None - ), - "number_type": ( - self.dataset_related_item_other.number_type - if self.dataset_related_item_other - else None - ), - "first_page": ( - self.dataset_related_item_other.first_page - if self.dataset_related_item_other - else None - ), - "last_page": ( - self.dataset_related_item_other.last_page - if self.dataset_related_item_other - else None - ), - "publisher": ( - self.dataset_related_item_other.publisher - if self.dataset_related_item_other - else None - ), - "edition": ( - self.dataset_related_item_other.edition - if self.dataset_related_item_other - else None - ), - "identifiers": [ - i.to_dict() - for i in self.dataset_related_item_identifier # type: ignore - ], - } - - def to_dict_metadata(self): - bigint_timestamp = self.dataset_related_item_other.publication_year - pub_year = "" - if bigint_timestamp: - unix_timestamp = bigint_timestamp / 1000 - datetime_obj = datetime.datetime.utcfromtimestamp(unix_timestamp) - pub_year = datetime_obj.strftime("%Y") - sorted_contributors = sorted( - self.dataset_related_item_contributor, - key=lambda creator: creator.created_at, - ) - - 
creators = [c for c in sorted_contributors if c.creator] - contributors = [c for c in sorted_contributors if not c.creator] - return { - "type": self.type, - "titles": [ - i.to_dict_metadata() - for i in self.dataset_related_item_title # type: ignore - ], - "identifiers": [ - i.to_dict_metadata() - for i in self.dataset_related_item_identifier # type: ignore - ], - "creators": [i.to_dict_metadata() for i in creators], # type: ignore - "contributors": [ - i.to_dict_metadata() for i in contributors # type: ignore - ], - # "publication_year": self.dataset_related_item_other.publication_year, - "publication_year": pub_year if bigint_timestamp else None, - "publisher": self.dataset_related_item_other.publisher, - } - - @staticmethod - def from_data(dataset, data: dict): - dataset_related_item = DatasetRelatedItem(dataset) - dataset_related_item.update(data) - return dataset_related_item - - def update(self, data: dict): - self.type = data["type"] - self.relation_type = data["relation_type"] - self.dataset_related_item_other.update(data) - self.dataset.touch_dataset() diff --git a/model/dataset_metadata/dataset_related_item_contributor.py b/model/dataset_metadata/dataset_related_item_contributor.py deleted file mode 100644 index 480757cb..00000000 --- a/model/dataset_metadata/dataset_related_item_contributor.py +++ /dev/null @@ -1,58 +0,0 @@ -import datetime -import uuid -from datetime import timezone - -from ..db import db - - -class DatasetRelatedItemContributor(db.Model): # type: ignore - def __init__(self, dataset_related_item, creator): - self.id = str(uuid.uuid4()) - self.dataset_related_item = dataset_related_item - self.created_at = datetime.datetime.now(timezone.utc).timestamp() - self.creator = creator - - __tablename__ = "dataset_related_item_contributor" - id = db.Column(db.CHAR(36), primary_key=True) - name = db.Column(db.String, nullable=False) - name_type = db.Column(db.String, nullable=True) - creator = db.Column(db.BOOLEAN, nullable=False) - 
contributor_type = db.Column(db.String, nullable=True) - created_at = db.Column(db.BigInteger, nullable=False) - - dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False - ) - dataset_related_item = db.relationship( - "DatasetRelatedItem", back_populates="dataset_related_item_contributor" - ) - - def to_dict(self): - return { - "id": self.id, - "name": self.name, - "name_type": self.name_type, - "contributor_type": self.contributor_type, - "created_at": self.created_at, - } - - def to_dict_metadata(self): - return { - "id": self.id, - "name": self.name, - "name_type": self.name_type, - "contributor_type": self.contributor_type, - } - - @staticmethod - def from_data(dataset_related_item, data: dict, creator): - contributor_ = DatasetRelatedItemContributor(dataset_related_item, creator) - contributor_.update(data) - return contributor_ - - def update(self, data: dict): - self.name = data["name"] if "name" in data else "" - self.name_type = data["name_type"] if "name_type" in data else None - self.contributor_type = ( - data["contributor_type"] if "contributor_type" in data else None - ) diff --git a/model/dataset_metadata/dataset_related_item_identifier.py b/model/dataset_metadata/dataset_related_item_identifier.py deleted file mode 100644 index 63d95f4b..00000000 --- a/model/dataset_metadata/dataset_related_item_identifier.py +++ /dev/null @@ -1,61 +0,0 @@ -import datetime -import uuid -from datetime import timezone - -from ..db import db - - -class DatasetRelatedItemIdentifier(db.Model): # type: ignore - def __init__(self, dataset_related_item): - self.id = str(uuid.uuid4()) - self.created_at = datetime.datetime.now(timezone.utc).timestamp() - self.dataset_related_item = dataset_related_item - - __tablename__ = "dataset_related_item_identifier" - id = db.Column(db.CHAR(36), primary_key=True) - identifier = db.Column(db.String, nullable=False) - type = db.Column(db.String, nullable=True) - metadata_scheme = 
db.Column(db.String, nullable=True) - scheme_uri = db.Column(db.String, nullable=True) - scheme_type = db.Column(db.String, nullable=True) - created_at = db.Column(db.BigInteger, nullable=False) - - dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False - ) - dataset_related_item = db.relationship( - "DatasetRelatedItem", back_populates="dataset_related_item_identifier" - ) - - def to_dict(self): - return { - "id": self.id, - "identifier": self.identifier, - "type": self.type, - "metadata_scheme": self.metadata_scheme, - "scheme_uri": self.scheme_uri, - "scheme_type": self.scheme_type, - "created_at": self.created_at, - } - - def to_dict_metadata(self): - return { - "id": self.id, - "identifier": self.identifier, - "type": self.type, - } - - @staticmethod - def from_data(dataset_related_item, data: dict): - identifier_ = DatasetRelatedItemIdentifier(dataset_related_item) - identifier_.update(data) - return identifier_ - - def update(self, data: dict): - self.identifier = data["identifier"] if "identifier" in data else "" - self.type = data["type"] if "type" in data else None - self.metadata_scheme = ( - data["metadata_scheme"] if "metadata_scheme" in data else "" - ) - self.scheme_uri = data["scheme_uri"] if "scheme_uri" in data else "" - self.scheme_type = data["scheme_type"] if "scheme_type" in data else "" diff --git a/model/dataset_metadata/dataset_related_item_other.py b/model/dataset_metadata/dataset_related_item_other.py deleted file mode 100644 index 39cbe02a..00000000 --- a/model/dataset_metadata/dataset_related_item_other.py +++ /dev/null @@ -1,68 +0,0 @@ -from ..db import db - - -class DatasetRelatedItemOther(db.Model): # type: ignore - def __init__(self, dataset_related_item): - self.dataset_related_item = dataset_related_item - self.publication_year = None - self.volume = "" - self.issue = "" - self.number_value = "" - self.number_type = None - self.first_page = "" - self.last_page = "" - 
self.publisher = "" - self.edition = "" - - __tablename__ = "dataset_related_item_other" - publication_year = db.Column(db.BigInteger, nullable=True) - volume = db.Column(db.String, nullable=False) - issue = db.Column(db.String, nullable=False) - number_value = db.Column(db.String, nullable=False) - number_type = db.Column(db.String, nullable=True) - first_page = db.Column(db.String, nullable=False) - last_page = db.Column(db.String, nullable=False) - publisher = db.Column(db.String, nullable=False) - edition = db.Column(db.String, nullable=False) - - dataset_related_item_id = db.Column( - db.CHAR(36), - db.ForeignKey("dataset_related_item.id"), - primary_key=True, - nullable=False, - ) - dataset_related_item = db.relationship( - "DatasetRelatedItem", back_populates="dataset_related_item_other" - ) - - def to_dict(self): - return { - "publication_year": self.publication_year, - "volume": self.volume, - "issue": self.issue, - "number_value": self.number_value, - "number_type": self.number_type, - "first_page": self.first_page, - "last_page": self.last_page, - "publisher": self.publisher, - "edition": self.edition, - } - - @staticmethod - def from_data(dataset_related_item, data: dict): - dataset_related_item_other = DatasetRelatedItemOther(dataset_related_item) - dataset_related_item_other.update(data) - return dataset_related_item_other - - def update(self, data: dict): - self.publication_year = ( - data["publication_year"] if "publication_year" in data else None - ) - self.volume = data["volume"] if "volume" in data else "" - self.issue = data["issue"] if "issue" in data else "" - self.number_value = data["number_value"] if "number_value" in data else "" - self.number_type = data["number_type"] if "number_type" in data else None - self.first_page = data["first_page"] if "first_page" in data else "" - self.last_page = data["last_page"] if "last_page" in data else "" - self.publisher = data["publisher"] if "publisher" in data else "" - self.edition = data["edition"] 
if "edition" in data else "" diff --git a/model/dataset_metadata/dataset_related_item_title.py b/model/dataset_metadata/dataset_related_item_title.py deleted file mode 100644 index fab997e5..00000000 --- a/model/dataset_metadata/dataset_related_item_title.py +++ /dev/null @@ -1,50 +0,0 @@ -import datetime -import uuid -from datetime import timezone - -from ..db import db - - -class DatasetRelatedItemTitle(db.Model): # type: ignore - def __init__(self, dataset_related_item): - self.id = str(uuid.uuid4()) - self.dataset_related_item = dataset_related_item - self.created_at = datetime.datetime.now(timezone.utc).timestamp() - - __tablename__ = "dataset_related_item_title" - id = db.Column(db.CHAR(36), primary_key=True) - type = db.Column(db.String, nullable=True) - title = db.Column(db.String, nullable=False) - created_at = db.Column(db.BigInteger, nullable=False) - - dataset_related_item_id = db.Column( - db.CHAR(36), db.ForeignKey("dataset_related_item.id"), nullable=False - ) - dataset_related_item = db.relationship( - "DatasetRelatedItem", back_populates="dataset_related_item_title" - ) - - def to_dict(self): - return { - "id": self.id, - "type": self.type, - "title": self.title, - "created_at": self.created_at, - } - - def to_dict_metadata(self): - return { - "id": self.id, - "title": self.title, - "type": self.type, - } - - @staticmethod - def from_data(dataset_related_item, data: dict): - dataset_related_item_title = DatasetRelatedItemTitle(dataset_related_item) - dataset_related_item_title.update(data) - return dataset_related_item_title - - def update(self, data: dict): - self.type = data["type"] - self.title = data["title"] diff --git a/sql/init.sql b/sql/init.sql index 3fda8433..6ecb5e4b 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -244,26 +244,6 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma -- Dumping structure for table public.dataset_readme - --- Dumping structure for table public.dataset_record_keys -CREATE TABLE 
IF NOT EXISTS "dataset_record_keys" ( - "id" CHAR(36) NOT NULL, - "key_type" VARCHAR NOT NULL, - "key_details" VARCHAR NOT NULL, - "dataset_id" CHAR(36) NOT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_record_keys_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_record_keys: 4 rows -/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; -INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES - ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), - ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; - -- Dumping structure for table public.dataset_related_item CREATE TABLE IF NOT EXISTS "dataset_related_item" ( "id" CHAR(36) NOT NULL, diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 2cdcc030..5edb972d 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -296,30 +296,6 @@ INSERT INTO "dataset_other" ("id", "language", "managing_organization_name", "ma ('00000000-0000-0000-0000-000000000001', 'eng', 'Research Organisation Registry', 'https://ror.org', '{1}', 'https://ror.org/other', 'NA', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_other" ENABLE KEYS */; --- Dumping structure for table public.dataset_readme - --- Dumping data for table public.dataset_readme: -1 rows -/*!40000 ALTER TABLE "dataset_readme" DISABLE KEYS */; - --- Dumping structure for table public.dataset_record_keys -CREATE TABLE IF NOT EXISTS "dataset_record_keys" ( - "id" CHAR(36) NOT NULL, - "key_type" VARCHAR NOT NULL, - "key_details" 
VARCHAR NOT NULL, - "dataset_id" CHAR(36) NOT NULL, - PRIMARY KEY ("id"), - CONSTRAINT "dataset_record_keys_dataset_id_fkey" FOREIGN KEY ("dataset_id") REFERENCES "dataset" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION -); - --- Dumping data for table public.dataset_record_keys: -1 rows -/*!40000 ALTER TABLE "dataset_record_keys" DISABLE KEYS */; -INSERT INTO "dataset_record_keys" ("id", "key_type", "key_details", "dataset_id") VALUES - ('46867b5a-9eb1-4f0e-98ba-5b453c2c9ff2', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('bb834d3c-b59a-4968-b31c-51bd22c11c4f', 'test', 'test', '00000000-0000-0000-0000-000000000001'), - ('82fbb094-74c5-4dd1-9248-9e219c0b70f5', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'), - ('59c1b98d-876f-49f6-aeb0-f32d4fde6c3f', 'test1', 'test1', '00000000-0000-0000-0000-000000000001'); -/*!40000 ALTER TABLE "dataset_record_keys" ENABLE KEYS */; - -- Dumping structure for table public.dataset_related_item CREATE TABLE IF NOT EXISTS "dataset_related_item" ( "id" CHAR(36) NOT NULL, diff --git a/tests/conftest.py b/tests/conftest.py index 355749e2..cd1ddc1e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -65,12 +65,6 @@ pytest.global_dataset_creator_id_editor = "" pytest.global_dataset_funder_id_admin = "" pytest.global_dataset_funder_id_editor = "" -pytest.global_dataset_related_item_creator_id_admin = "" -pytest.global_dataset_related_item_creator_id_editor = "" -pytest.global_related_item_identifier_id_admin = "" -pytest.global_related_item_identifier_id_editor = "" -pytest.global_related_item_title_id_admin = "" -pytest.global_related_item_title_id_editor = "" pytest.global_dataset_rights_id_admin = "" pytest.global_dataset_rights_id_editor = "" pytest.global_dataset_subject_id_admin = "" @@ -78,16 +72,10 @@ pytest.global_dataset_title_id_admin = "" pytest.global_dataset_title_id_editor = "" -pytest.global_dataset_related_item_identifier_id_admin = "" -pytest.global_dataset_related_item_title_id_admin = "" 
-pytest.global_dataset_related_item_title_id_editor = "" -pytest.global_dataset_related_item_contributor_id_admin = "" -pytest.global_dataset_related_item_id_admin = "" -pytest.global_dataset_related_item_id_editor = "" -pytest.global_dataset_related_item_contributor_id_editor = "" +pytest.global_dataset_related_identifier_id_admin = "" +pytest.global_dataset_related_identifier_id_editor = "" pytest.global_dataset_description_id_admin = "" pytest.global_dataset_description_id_editor = "" -pytest.global_dataset_related_item_identifier_id_editor = "" # Dataset variables use for testing pytest.global_dataset_id = "" @@ -98,12 +86,7 @@ pytest.global_dataset_date_id = "" pytest.global_dataset_description_id = "" pytest.global_dataset_funder_id = "" -pytest.global_dataset_related_item_id = "" -pytest.global_dataset_related_item_contributor_id = "" -pytest.global_dataset_related_item_creator_id = "" -pytest.global_dataset_related_item_identifier_id = "" -pytest.global_dataset_related_item_main_title_id = "" -pytest.global_dataset_related_item_sub_title_id = "" +pytest.global_dataset_related_identifier_id = "" pytest.global_dataset_rights_id = "" pytest.global_dataset_subject_id = "" pytest.global_dataset_title_id = "" diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index d66ed259..5791d104 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -2714,6 +2714,7 @@ def test_put_other_dataset_metadata(clients): "language": "English", "resource_type": "Resource Type", "size": ["Size"], + "format": ["Format"], "standards_followed": "Standards Followed", }, ) @@ -2723,10 +2724,9 @@ def test_put_other_dataset_metadata(clients): assert response_data["acknowledgement"] == "Yes" assert response_data["language"] == "English" - # assert ( - # response_data["resource_type"] == "Resource Type" - # ) # CURRENTLY NOT BEING RETURNED + assert 
response_data["size"] == ["Size"] + assert response_data["format"] == ["Format"] assert response_data["standards_followed"] == "Standards Followed" admin_response = _admin_client.put( @@ -2734,8 +2734,9 @@ def test_put_other_dataset_metadata(clients): json={ "acknowledgement": "Yes", "language": "English", - "resource_type": "Admin Resource Type", + "resource_type": "Resource Type", "size": ["Size"], + "format": ["Format"], "standards_followed": "Standards Followed", }, ) @@ -2745,8 +2746,8 @@ def test_put_other_dataset_metadata(clients): assert admin_response_data["acknowledgement"] == "Yes" assert admin_response_data["language"] == "English" - # assert admin_response_data["resource_type"] == "Admin Resource Type" assert admin_response_data["size"] == ["Size"] + assert admin_response_data["format"] == ["Format"] assert admin_response_data["standards_followed"] == "Standards Followed" editor_response = _editor_client.put( @@ -2754,8 +2755,9 @@ def test_put_other_dataset_metadata(clients): json={ "acknowledgement": "Yes", "language": "English", - "resource_type": "Editor Resource Type", + "resource_type": "Resource Type", "size": ["Size"], + "format": ["Format"], "standards_followed": "Standards Followed", }, ) @@ -2765,8 +2767,8 @@ def test_put_other_dataset_metadata(clients): assert editor_response_data["acknowledgement"] == "Yes" assert editor_response_data["language"] == "English" - # assert editor_response_data["resource_type"] == "Editor Resource Type" assert editor_response_data["size"] == ["Size"] + assert editor_response_data["format"] == ["Format"] assert editor_response_data["standards_followed"] == "Standards Followed" viewer_response = _viewer_client.put( @@ -2774,12 +2776,12 @@ def test_put_other_dataset_metadata(clients): json={ "acknowledgement": "Yes", "language": "English", - "resource_type": "Viewer Resource Type", + "resource_type": "Resource Type", "size": ["Size"], + "format": ["Format"], "standards_followed": "Standards Followed", }, ) - 
assert viewer_response.status_code == 403 @@ -2811,7 +2813,7 @@ def test_get_other_dataset_metadata(clients): assert response.status_code == 200 assert admin_response.status_code == 200 assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 + # assert viewer_response.status_code == 200 response_data = json.loads(response.data) admin_response_data = json.loads(admin_response.data) @@ -2824,44 +2826,46 @@ def test_get_other_dataset_metadata(clients): assert response_data["language"] == "English" # assert response_data["resource_type"] == "Editor Resource Type" assert response_data["size"] == ["Size"] + assert response_data["format"] == ["Format"] assert response_data["standards_followed"] == "Standards Followed" assert admin_response_data["acknowledgement"] == "Yes" assert admin_response_data["language"] == "English" # assert admin_response_data["resource_type"] == "Editor Resource Type" assert admin_response_data["size"] == ["Size"] + assert admin_response_data["format"] == ["Format"] assert admin_response_data["standards_followed"] == "Standards Followed" assert editor_response_data["acknowledgement"] == "Yes" assert editor_response_data["language"] == "English" # assert editor_response_data["resource_type"] == "Editor Resource Type" assert editor_response_data["size"] == ["Size"] + assert editor_response_data["format"] == ["Format"] assert editor_response_data["standards_followed"] == "Standards Followed" assert viewer_response_data["acknowledgement"] == "Yes" assert viewer_response_data["language"] == "English" - # assert viewer_response_data["resource_type"] == "Editor Resource Type" assert viewer_response_data["size"] == ["Size"] + assert viewer_response_data["format"] == ["Format"] assert viewer_response_data["standards_followed"] == "Standards Followed" -# ------------------- PUBLICATION METADATA ------------------- # -def test_put_dataset_publisher_metadata(clients): +# ------------------- DATASET MANAGING ORGANIZATION METADATA 
------------------- # +def test_put_dataset_managing_organization_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (PUT) Then check that the response is valid and updates the dataset - publisher metadata content + managing organization metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "publisher": "Publisher", "managing_organization_name": "Managing Organization Name", "managing_organization_ror_id": "Managing Organization ROR ID", }, @@ -2870,16 +2874,14 @@ def test_put_dataset_publisher_metadata(clients): assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["publisher"] == "Publisher" assert response_data["managing_organization_name"] == "Managing Organization Name" assert ( response_data["managing_organization_ror_id"] == "Managing Organization ROR ID" ) admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "publisher": "Publisher", "managing_organization_name": "Managing Admin Organization Name", "managing_organization_ror_id": "Managing Organization ROR ID", }, @@ -2888,7 +2890,6 @@ def test_put_dataset_publisher_metadata(clients): assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - assert admin_response_data["publisher"] == "Publisher" assert ( admin_response_data["managing_organization_name"] == "Managing Admin Organization Name" @@ -2899,9 +2900,8 @@ def test_put_dataset_publisher_metadata(clients): 
) editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "publisher": "Publisher", "managing_organization_name": "Managing Editor Organization Name", "managing_organization_ror_id": "Managing Organization ROR ID", }, @@ -2910,7 +2910,6 @@ def test_put_dataset_publisher_metadata(clients): assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - assert editor_response_data["publisher"] == "Publisher" assert ( editor_response_data["managing_organization_name"] == "Managing Editor Organization Name" @@ -2921,9 +2920,8 @@ def test_put_dataset_publisher_metadata(clients): ) viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher", + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "publisher": "Publisher", "managing_organization_name": "Managing Viewer Organization Name", "managing_organization_ror_id": "Managing Organization ROR ID", }, @@ -2932,29 +2930,29 @@ def test_put_dataset_publisher_metadata(clients): assert viewer_response.status_code == 403 -def test_get_dataset_publisher_metadata(clients): +def test_get_dataset_managing_organization_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - publisher metadata content + managing-organization metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" ) admin_response = 
_admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" ) editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" ) viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/publisher" + f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" ) assert response.status_code == 200 @@ -2969,7 +2967,6 @@ def test_get_dataset_publisher_metadata(clients): # Editor was the last to update the metadata successfully so # the response should reflect that - assert response_data["publisher"] == "Publisher" assert ( response_data["managing_organization_name"] == "Managing Editor Organization Name" @@ -2978,7 +2975,6 @@ def test_get_dataset_publisher_metadata(clients): response_data["managing_organization_ror_id"] == "Managing Organization ROR ID" ) - assert admin_response_data["publisher"] == "Publisher" assert ( admin_response_data["managing_organization_name"] == "Managing Editor Organization Name" @@ -2988,7 +2984,6 @@ def test_get_dataset_publisher_metadata(clients): == "Managing Organization ROR ID" ) - assert editor_response_data["publisher"] == "Publisher" assert ( editor_response_data["managing_organization_name"] == "Managing Editor Organization Name" @@ -2998,7 +2993,6 @@ def test_get_dataset_publisher_metadata(clients): == "Managing Organization ROR ID" ) - assert viewer_response_data["publisher"] == "Publisher" assert ( viewer_response_data["managing_organization_name"] == "Managing Editor Organization Name" @@ -3009,158 +3003,30 @@ def test_get_dataset_publisher_metadata(clients): ) -# ------------------- RECORD KEYS METADATA ------------------- # -def test_put_dataset_record_keys_metadata(clients): +# ------------------- RELATED IDENTIFIER METADATA ------------------- # +def 
test_post_dataset_related_identifier_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - record keys metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", - json={"type": "Record Type", "details": "Details for Record Keys"}, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["type"] == "Record Type" - assert response_data["details"] == "Details for Record Keys" - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", - json={"type": "Record Type", "details": "Admin Details for Record Keys"}, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["type"] == "Record Type" - assert admin_response_data["details"] == "Admin Details for Record Keys" - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", - json={"type": "Record Type", "details": "Editor Details for Record Keys"}, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["type"] == "Record Type" - assert editor_response_data["details"] == "Editor Details for Record Keys" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys", - json={"type": "Record Type", "details": "Viewer Details for Record Keys"}, - ) - - assert viewer_response.status_code == 403 - - -def 
test_get_dataset_record_keys_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - record keys metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/record-keys" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Editor was the last to update the metadata successfully so - # the response should reflect that - assert response_data["type"] == "Record Type" - assert response_data["details"] == "Editor Details for Record Keys" - - assert admin_response_data["type"] == "Record Type" - assert admin_response_data["details"] == "Editor Details for Record Keys" - - assert editor_response_data["type"] == "Record Type" - assert editor_response_data["details"] == "Editor Details for Record Keys" - - assert viewer_response_data["type"] == "Record Type" - assert viewer_response_data["details"] == "Editor Details for Record Keys" - - -# ------------------- RELATED ITEM METADATA 
------------------- # -def test_post_dataset_related_item_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-item' + When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier' endpoint is requested (POST) Then check that the response is valid and creates the dataset - related item metadata content + related identifier metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", json=[ { - "contributors": [ - { - "name": "Ndafsdame", - "contributor_type": "Con Type", - "name_type": "Personal", - } - ], - "creators": [{"name": "Name", "name_type": "Personal"}], - "edition": "Edition", - "first_page": "First Page", - "identifiers": [ - { - "identifier": "Identifier", - "metadata_scheme": "Metadata Scheme", - "scheme_type": "Scheme Type", - "scheme_uri": "Scheme URI", - "type": "ARK", - } - ], - "issue": "Issue", - "last_page": "Last Page", - "number_type": "Number Type", - "number_value": "Number Value", - "publication_year": 2013, - "publisher": "Publisher", - "relation_type": "Relation Type", - "titles": [ - {"title": "Title", "type": "MainTitle"}, - {"title": "Title", "type": "Subtitle"}, - ], - "type": "Type", - "volume": "Volume", + "identifier": "test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test" } ], ) @@ -3169,100 +3035,28 @@ def test_post_dataset_related_item_metadata(clients): assert response.status_code == 201 response_data = json.loads(response.data) - # 
seach for main title and subtitle index in response_data[n]["titles"] - main_title_0 = next( - ( - index - for (index, d) in enumerate(response_data[0]["titles"]) - if d["type"] == "MainTitle" - ), - None, - ) - sub_title_0 = next( - ( - index - for (index, d) in enumerate(response_data[0]["titles"]) - if d["type"] == "Subtitle" - ), - None, - ) - pytest.global_dataset_related_item_id = response_data[0]["id"] - pytest.global_dataset_related_item_contributor_id = response_data[0][ - "contributors" - ][0]["id"] - pytest.global_dataset_related_item_creator_id = response_data[0]["creators"][0][ - "id" - ] - pytest.global_dataset_related_item_identifier_id = response_data[0]["identifiers"][ - 0 - ]["id"] - # pylint: disable=line-too-long - pytest.global_dataset_related_item_main_title_id = response_data[0]["titles"][ - main_title_0 - ]["id"] - pytest.global_dataset_related_item_sub_title_id = response_data[0]["titles"][ - sub_title_0 - ]["id"] - - assert response_data[0]["contributors"][0]["name"] == "Ndafsdame" - assert response_data[0]["contributors"][0]["contributor_type"] == "Con Type" - assert response_data[0]["contributors"][0]["name_type"] == "Personal" - assert response_data[0]["creators"][0]["name"] == "Name" - assert response_data[0]["creators"][0]["name_type"] == "Personal" - assert response_data[0]["edition"] == "Edition" - assert response_data[0]["first_page"] == "First Page" - assert response_data[0]["identifiers"][0]["identifier"] == "Identifier" - assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" - assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" - assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["identifiers"][0]["type"] == "ARK" - assert response_data[0]["issue"] == "Issue" - assert response_data[0]["last_page"] == "Last Page" - assert response_data[0]["number_type"] == "Number Type" - assert response_data[0]["number_value"] == "Number Value" - 
assert response_data[0]["publication_year"] == 2013 - assert response_data[0]["publisher"] == "Publisher" - assert response_data[0]["relation_type"] == "Relation Type" - assert response_data[0]["titles"][main_title_0]["title"] == "Title" - assert response_data[0]["titles"][main_title_0]["type"] == "MainTitle" - assert response_data[0]["titles"][sub_title_0]["title"] == "Title" - assert response_data[0]["titles"][sub_title_0]["type"] == "Subtitle" - assert response_data[0]["type"] == "Type" - assert response_data[0]["volume"] == "Volume" + + pytest.global_dataset_related_identifier_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "test identifier" + assert response_data[0]["identifier_type"] == "test identifier type" + assert response_data[0]["relation_type"] == "test relation type" + assert response_data[0]["related_metadata_scheme"] == "test" + assert response_data[0]["scheme_uri"] == "test" + assert response_data[0]["scheme_type"] == "test" + assert response_data[0]["resource_type"] == "test" admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", json=[ { - "contributors": [ - { - "name": "Admin Ndafsdame", - "contributor_type": "Admin Con Type", - "name_type": "Personal", - } - ], - "creators": [{"name": "Admin Name", "name_type": "Personal"}], - "edition": "Admin Edition", - "first_page": "Admin First Page", - "identifiers": [ - { - "identifier": "Admin Identifier", - "metadata_scheme": "Admin Metadata Scheme", - "scheme_type": "Admin Scheme Type", - "scheme_uri": "Admin Scheme URI", - "type": "ARK", - } - ], - "issue": "Admin Issue", - "last_page": "Admin Last Page", - "number_type": "Admin Number Type", - "number_value": "Admin Number Value", - "publication_year": 2013, - "publisher": "Admin Publisher", - "relation_type": "Admin Relation Type", - "titles": [{"title": "Admin Title", "type": "AlternativeTitle"}], - 
"type": "Admin Type", - "volume": "Admin Volume", + "identifier": "admin test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test" } ], ) @@ -3271,174 +3065,52 @@ def test_post_dataset_related_item_metadata(clients): assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_related_item_id_admin = admin_response_data[1]["id"] - pytest.global_dataset_related_item_contributor_id_admin = admin_response_data[1][ - "contributors" - ][0]["id"] - pytest.global_dataset_related_item_creator_id_admin = admin_response_data[1][ - "creators" - ][0]["id"] - pytest.global_dataset_related_item_identifier_id_admin = admin_response_data[1][ - "identifiers" - ][0]["id"] - pytest.global_dataset_related_item_title_id_admin = admin_response_data[1][ - "titles" - ][0]["id"] - - assert admin_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" - assert ( - admin_response_data[1]["contributors"][0]["contributor_type"] - == "Admin Con Type" - ) - assert admin_response_data[1]["contributors"][0]["name_type"] == "Personal" - assert admin_response_data[1]["creators"][0]["name"] == "Admin Name" - assert admin_response_data[1]["creators"][0]["name_type"] == "Personal" - assert admin_response_data[1]["edition"] == "Admin Edition" - assert admin_response_data[1]["first_page"] == "Admin First Page" - assert admin_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" - assert ( - admin_response_data[1]["identifiers"][0]["metadata_scheme"] - == "Admin Metadata Scheme" - ) - assert ( - admin_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" - ) - assert admin_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" - assert admin_response_data[1]["identifiers"][0]["type"] == "ARK" - assert admin_response_data[1]["issue"] 
== "Admin Issue" - assert admin_response_data[1]["last_page"] == "Admin Last Page" - assert admin_response_data[1]["number_type"] == "Admin Number Type" - assert admin_response_data[1]["number_value"] == "Admin Number Value" - assert admin_response_data[1]["publication_year"] == 2013 - assert admin_response_data[1]["publisher"] == "Admin Publisher" - assert admin_response_data[1]["relation_type"] == "Admin Relation Type" - assert admin_response_data[1]["titles"][0]["title"] == "Admin Title" - assert admin_response_data[1]["titles"][0]["type"] == "AlternativeTitle" - assert admin_response_data[1]["type"] == "Admin Type" - assert admin_response_data[1]["volume"] == "Admin Volume" - + pytest.global_dataset_related_identifier_id_admin = admin_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "admin test identifier" + assert admin_response_data[0]["identifier_type"] == "test identifier type" + assert admin_response_data[0]["relation_type"] == "test relation type" + assert admin_response_data[0]["related_metadata_scheme"] == "test" + assert admin_response_data[0]["scheme_uri"] == "test" + assert admin_response_data[0]["scheme_type"] == "test" + assert admin_response_data[0]["resource_type"] == "test" editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", json=[ { - "contributors": [ - { - "name": "Editor Ndafsdame", - "contributor_type": "Editor Con Type", - "name_type": "Personal", - } - ], - "creators": [{"name": "Editor Name", "name_type": "Personal"}], - "edition": "Editor Edition", - "first_page": "Editor First Page", - "identifiers": [ - { - "identifier": "Editor Identifier", - "metadata_scheme": "Editor Metadata Scheme", - "scheme_type": "Editor Scheme Type", - "scheme_uri": "Editor Scheme URI", - "type": "ARK", - } - ], - "issue": "Editor Issue", - "last_page": "Editor Last Page", - "number_type": "Editor Number 
Type", - "number_value": "Editor Number Value", - "publication_year": 2013, - "publisher": "Editor Publisher", - "relation_type": "Editor Relation Type", - "titles": [{"title": "Editor Title", "type": "Subtitle"}], - "type": "Editor Type", - "volume": "Editor Volume", + "identifier": "editor test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test" } ], ) assert editor_response.status_code == 201 editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_related_item_id_editor = editor_response_data[2]["id"] - pytest.global_dataset_related_item_contributor_id_editor = editor_response_data[2][ - "contributors" - ][0]["id"] - pytest.global_dataset_related_item_creator_id_editor = editor_response_data[2][ - "creators" - ][0]["id"] - pytest.global_dataset_related_item_identifier_id_editor = editor_response_data[2][ - "identifiers" - ][0]["id"] - pytest.global_dataset_related_item_title_id_editor = editor_response_data[2][ - "titles" - ][0]["id"] - - assert editor_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" - assert ( - editor_response_data[2]["contributors"][0]["contributor_type"] - == "Editor Con Type" - ) - assert editor_response_data[2]["contributors"][0]["name_type"] == "Personal" - assert editor_response_data[2]["creators"][0]["name"] == "Editor Name" - assert editor_response_data[2]["creators"][0]["name_type"] == "Personal" - assert editor_response_data[2]["edition"] == "Editor Edition" - assert editor_response_data[2]["first_page"] == "Editor First Page" - assert ( - editor_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" - ) - assert ( - editor_response_data[2]["identifiers"][0]["metadata_scheme"] - == "Editor Metadata Scheme" - ) - assert ( - editor_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" - ) - assert ( - 
editor_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" - ) - assert editor_response_data[2]["identifiers"][0]["type"] == "ARK" - assert editor_response_data[2]["issue"] == "Editor Issue" - assert editor_response_data[2]["last_page"] == "Editor Last Page" - assert editor_response_data[2]["number_type"] == "Editor Number Type" - assert editor_response_data[2]["number_value"] == "Editor Number Value" - assert editor_response_data[2]["publication_year"] == 2013 - assert editor_response_data[2]["publisher"] == "Editor Publisher" - assert editor_response_data[2]["relation_type"] == "Editor Relation Type" - assert editor_response_data[2]["titles"][0]["title"] == "Editor Title" - assert editor_response_data[2]["titles"][0]["type"] == "Subtitle" - assert editor_response_data[2]["type"] == "Editor Type" - assert editor_response_data[2]["volume"] == "Editor Volume" - + pytest.global_dataset_related_identifier_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["identifier"] == "editor test identifier" + assert editor_response_data[0]["identifier_type"] == "test identifier type" + assert editor_response_data[0]["relation_type"] == "test relation type" + assert editor_response_data[0]["related_metadata_scheme"] == "test" + assert editor_response_data[0]["scheme_uri"] == "test" + assert editor_response_data[0]["scheme_type"] == "test" + assert editor_response_data[0]["resource_type"] == "test" viewer_client = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", json=[ { - "contributors": [ - { - "name": "Viewer Ndafsdame", - "contributor_type": "Viewer Con Type", - "name_type": "Personal", - } - ], - "creators": [{"name": "Viewer Name", "name_type": "Personal"}], - "edition": "Viewer Edition", - "first_page": "Viewer First Page", - "identifiers": [ - { - "identifier": "Viewer Identifier", - "metadata_scheme": "Viewer 
Metadata Scheme", - "scheme_type": "Viewer Scheme Type", - "scheme_uri": "Viewer Scheme URI", - "type": "ARK", - } - ], - "issue": "Viewer Issue", - "last_page": "Viewer Last Page", - "number_type": "Viewer Number Type", - "number_value": "Viewer Number Value", - "publication_year": 2013, - "publisher": "Viewer Publisher", - "relation_type": "Viewer Relation Type", - "titles": [{"title": "Viewer Title", "type": "Subtitle"}], - "type": "Viewer Type", - "volume": "Viewer Volume", + "identifier": "viewer test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test" } ], ) @@ -3446,29 +3118,29 @@ def test_post_dataset_related_item_metadata(clients): assert viewer_client.status_code == 403 -def test_get_dataset_related_item_metadata(clients): +def test_get_dataset_related_identifier_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - related item metadata content + related identifier metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) viewer_response = _viewer_client.get( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) assert response.status_code == 200 @@ -3483,615 +3155,128 @@ def test_get_dataset_related_item_metadata(clients): # seach for main title and subtitle index in response_data[n]["titles"] # pylint: disable=line-too-long - main_title_0 = next( - ( - index - for (index, d) in enumerate(response_data[0]["titles"]) - if d["type"] == "MainTitle" - ), - None, - ) - sub_title_0 = next( - ( - index - for (index, d) in enumerate(response_data[0]["titles"]) - if d["type"] == "Subtitle" - ), - None, - ) - a_main_title_0 = next( - ( - index - for (index, d) in enumerate(admin_response_data[0]["titles"]) - if d["type"] == "MainTitle" - ), - None, - ) - a_sub_title_0 = next( - ( - index - for (index, d) in enumerate(admin_response_data[0]["titles"]) - if d["type"] == "Subtitle" - ), - None, - ) - e_main_title_0 = next( - ( - index - for (index, d) in enumerate(editor_response_data[0]["titles"]) - if d["type"] == "MainTitle" - ), - None, - ) - e_sub_title_0 = next( - ( - index - for (index, d) in enumerate(editor_response_data[0]["titles"]) - if d["type"] == "Subtitle" - ), - None, - ) - v_main_title_0 = next( - ( - index - for (index, d) in enumerate(viewer_response_data[0]["titles"]) - if d["type"] == "MainTitle" - ), - None, - ) - v_sub_title_0 = next( - ( - index - for (index, d) in enumerate(viewer_response_data[0]["titles"]) - if d["type"] == "Subtitle" - ), - None, - ) - assert response_data[0]["contributors"][0]["name"] == "Ndafsdame" - assert response_data[0]["contributors"][0]["contributor_type"] == "Con Type" - assert response_data[0]["contributors"][0]["name_type"] == "Personal" - assert response_data[0]["creators"][0]["name"] == "Name" - assert response_data[0]["creators"][0]["name_type"] == "Personal" - assert response_data[0]["edition"] == "Edition" - assert response_data[0]["first_page"] == "First Page" - assert 
response_data[0]["identifiers"][0]["identifier"] == "Identifier" - assert response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" - assert response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" - assert response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["identifiers"][0]["type"] == "ARK" - assert response_data[0]["issue"] == "Issue" - assert response_data[0]["last_page"] == "Last Page" - assert response_data[0]["number_type"] == "Number Type" - assert response_data[0]["number_value"] == "Number Value" - assert response_data[0]["publication_year"] == 2013 - assert response_data[0]["publisher"] == "Publisher" - assert response_data[0]["relation_type"] == "Relation Type" - assert response_data[0]["titles"][main_title_0]["title"] == "Title" - assert response_data[0]["titles"][main_title_0]["type"] == "MainTitle" - assert response_data[0]["titles"][sub_title_0]["title"] == "Title" - assert response_data[0]["titles"][sub_title_0]["type"] == "Subtitle" - assert response_data[0]["type"] == "Type" - assert response_data[0]["volume"] == "Volume" - assert response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" - assert response_data[1]["contributors"][0]["contributor_type"] == "Admin Con Type" - assert response_data[1]["contributors"][0]["name_type"] == "Personal" - assert response_data[1]["creators"][0]["name"] == "Admin Name" - assert response_data[1]["creators"][0]["name_type"] == "Personal" - assert response_data[1]["edition"] == "Admin Edition" - assert response_data[1]["first_page"] == "Admin First Page" - assert response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" - assert ( - response_data[1]["identifiers"][0]["metadata_scheme"] == "Admin Metadata Scheme" - ) - assert response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" - assert response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" - assert response_data[1]["identifiers"][0]["type"] == 
"ARK" - assert response_data[1]["issue"] == "Admin Issue" - assert response_data[1]["last_page"] == "Admin Last Page" - assert response_data[1]["number_type"] == "Admin Number Type" - assert response_data[1]["number_value"] == "Admin Number Value" - assert response_data[1]["publication_year"] == 2013 - assert response_data[1]["publisher"] == "Admin Publisher" - assert response_data[1]["relation_type"] == "Admin Relation Type" - assert response_data[1]["titles"][0]["title"] == "Admin Title" - assert response_data[1]["titles"][0]["type"] == "AlternativeTitle" - assert response_data[1]["type"] == "Admin Type" - assert response_data[1]["volume"] == "Admin Volume" - assert response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" - assert response_data[2]["contributors"][0]["contributor_type"] == "Editor Con Type" - assert response_data[2]["contributors"][0]["name_type"] == "Personal" - assert response_data[2]["creators"][0]["name"] == "Editor Name" - assert response_data[2]["creators"][0]["name_type"] == "Personal" - assert response_data[2]["edition"] == "Editor Edition" - assert response_data[2]["first_page"] == "Editor First Page" - assert response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" - assert ( - response_data[2]["identifiers"][0]["metadata_scheme"] - == "Editor Metadata Scheme" - ) - assert response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" - assert response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" - assert response_data[2]["identifiers"][0]["type"] == "ARK" - assert response_data[2]["issue"] == "Editor Issue" - assert response_data[2]["last_page"] == "Editor Last Page" - assert response_data[2]["number_type"] == "Editor Number Type" - assert response_data[2]["number_value"] == "Editor Number Value" - assert response_data[2]["publication_year"] == 2013 - assert response_data[2]["publisher"] == "Editor Publisher" - assert response_data[2]["relation_type"] == "Editor Relation Type" - 
assert response_data[2]["titles"][0]["title"] == "Editor Title" - assert response_data[2]["titles"][0]["type"] == "Subtitle" - assert response_data[2]["type"] == "Editor Type" - assert response_data[2]["volume"] == "Editor Volume" - - assert admin_response_data[0]["contributors"][0]["name"] == "Ndafsdame" - assert admin_response_data[0]["contributors"][0]["contributor_type"] == "Con Type" - assert admin_response_data[0]["contributors"][0]["name_type"] == "Personal" - assert admin_response_data[0]["creators"][0]["name"] == "Name" - assert admin_response_data[0]["creators"][0]["name_type"] == "Personal" - assert admin_response_data[0]["edition"] == "Edition" - assert admin_response_data[0]["first_page"] == "First Page" - assert admin_response_data[0]["identifiers"][0]["identifier"] == "Identifier" - assert ( - admin_response_data[0]["identifiers"][0]["metadata_scheme"] == "Metadata Scheme" - ) - assert admin_response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" - assert admin_response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert admin_response_data[0]["identifiers"][0]["type"] == "ARK" - assert admin_response_data[0]["issue"] == "Issue" - assert admin_response_data[0]["last_page"] == "Last Page" - assert admin_response_data[0]["number_type"] == "Number Type" - assert admin_response_data[0]["number_value"] == "Number Value" - assert admin_response_data[0]["publication_year"] == 2013 - assert admin_response_data[0]["publisher"] == "Publisher" - assert admin_response_data[0]["relation_type"] == "Relation Type" - assert admin_response_data[0]["titles"][a_main_title_0]["title"] == "Title" - assert admin_response_data[0]["titles"][a_main_title_0]["type"] == "MainTitle" - assert admin_response_data[0]["titles"][a_sub_title_0]["title"] == "Title" - assert admin_response_data[0]["titles"][a_sub_title_0]["type"] == "Subtitle" - assert admin_response_data[0]["type"] == "Type" - assert admin_response_data[0]["volume"] == "Volume" - assert 
admin_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" - assert ( - admin_response_data[1]["contributors"][0]["contributor_type"] - == "Admin Con Type" - ) - assert admin_response_data[1]["contributors"][0]["name_type"] == "Personal" - assert admin_response_data[1]["creators"][0]["name"] == "Admin Name" - assert admin_response_data[1]["creators"][0]["name_type"] == "Personal" - assert admin_response_data[1]["edition"] == "Admin Edition" - assert admin_response_data[1]["first_page"] == "Admin First Page" - assert admin_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" - assert ( - admin_response_data[1]["identifiers"][0]["metadata_scheme"] - == "Admin Metadata Scheme" - ) - assert ( - admin_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" - ) - assert admin_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" - assert admin_response_data[1]["identifiers"][0]["type"] == "ARK" - assert admin_response_data[1]["issue"] == "Admin Issue" - assert admin_response_data[1]["last_page"] == "Admin Last Page" - assert admin_response_data[1]["number_type"] == "Admin Number Type" - assert admin_response_data[1]["number_value"] == "Admin Number Value" - assert admin_response_data[1]["publication_year"] == 2013 - assert admin_response_data[1]["publisher"] == "Admin Publisher" - assert admin_response_data[1]["relation_type"] == "Admin Relation Type" - assert admin_response_data[1]["titles"][0]["title"] == "Admin Title" - assert admin_response_data[1]["titles"][0]["type"] == "AlternativeTitle" - assert admin_response_data[1]["type"] == "Admin Type" - assert admin_response_data[1]["volume"] == "Admin Volume" - assert admin_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" - assert ( - admin_response_data[2]["contributors"][0]["contributor_type"] - == "Editor Con Type" - ) - assert admin_response_data[2]["contributors"][0]["name_type"] == "Personal" - assert 
admin_response_data[2]["creators"][0]["name"] == "Editor Name" - assert admin_response_data[2]["creators"][0]["name_type"] == "Personal" - assert admin_response_data[2]["edition"] == "Editor Edition" - assert admin_response_data[2]["first_page"] == "Editor First Page" - assert admin_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" - assert ( - admin_response_data[2]["identifiers"][0]["metadata_scheme"] - == "Editor Metadata Scheme" - ) - assert ( - admin_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" - ) - assert admin_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" - assert admin_response_data[2]["identifiers"][0]["type"] == "ARK" - assert admin_response_data[2]["issue"] == "Editor Issue" - assert admin_response_data[2]["last_page"] == "Editor Last Page" - assert admin_response_data[2]["number_type"] == "Editor Number Type" - assert admin_response_data[2]["number_value"] == "Editor Number Value" - assert admin_response_data[2]["publication_year"] == 2013 - assert admin_response_data[2]["publisher"] == "Editor Publisher" - assert admin_response_data[2]["relation_type"] == "Editor Relation Type" - assert admin_response_data[2]["titles"][0]["title"] == "Editor Title" - assert admin_response_data[2]["titles"][0]["type"] == "Subtitle" - assert admin_response_data[2]["type"] == "Editor Type" - assert admin_response_data[2]["volume"] == "Editor Volume" - - assert editor_response_data[0]["contributors"][0]["name"] == "Ndafsdame" - assert editor_response_data[0]["contributors"][0]["contributor_type"] == "Con Type" - assert editor_response_data[0]["contributors"][0]["name_type"] == "Personal" - assert editor_response_data[0]["creators"][0]["name"] == "Name" - assert editor_response_data[0]["creators"][0]["name_type"] == "Personal" - assert editor_response_data[0]["edition"] == "Edition" - assert editor_response_data[0]["first_page"] == "First Page" - assert 
editor_response_data[0]["identifiers"][0]["identifier"] == "Identifier" - assert ( - editor_response_data[0]["identifiers"][0]["metadata_scheme"] - == "Metadata Scheme" - ) - assert editor_response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" - assert editor_response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert editor_response_data[0]["identifiers"][0]["type"] == "ARK" - assert editor_response_data[0]["issue"] == "Issue" - assert editor_response_data[0]["last_page"] == "Last Page" - assert editor_response_data[0]["number_type"] == "Number Type" - assert editor_response_data[0]["number_value"] == "Number Value" - assert editor_response_data[0]["publication_year"] == 2013 - assert editor_response_data[0]["publisher"] == "Publisher" - assert editor_response_data[0]["relation_type"] == "Relation Type" - assert editor_response_data[0]["titles"][e_main_title_0]["title"] == "Title" - assert editor_response_data[0]["titles"][e_main_title_0]["type"] == "MainTitle" - assert editor_response_data[0]["titles"][e_sub_title_0]["title"] == "Title" - assert editor_response_data[0]["titles"][e_sub_title_0]["type"] == "Subtitle" - assert editor_response_data[0]["type"] == "Type" - assert editor_response_data[0]["volume"] == "Volume" - assert editor_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" - assert ( - editor_response_data[1]["contributors"][0]["contributor_type"] - == "Admin Con Type" - ) - assert editor_response_data[1]["contributors"][0]["name_type"] == "Personal" - assert editor_response_data[1]["creators"][0]["name"] == "Admin Name" - assert editor_response_data[1]["creators"][0]["name_type"] == "Personal" - assert editor_response_data[1]["edition"] == "Admin Edition" - assert editor_response_data[1]["first_page"] == "Admin First Page" - assert editor_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" - assert ( - editor_response_data[1]["identifiers"][0]["metadata_scheme"] - == "Admin Metadata Scheme" - 
) - assert ( - editor_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" - ) - assert editor_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" - assert editor_response_data[1]["identifiers"][0]["type"] == "ARK" - assert editor_response_data[1]["issue"] == "Admin Issue" - assert editor_response_data[1]["last_page"] == "Admin Last Page" - assert editor_response_data[1]["number_type"] == "Admin Number Type" - assert editor_response_data[1]["number_value"] == "Admin Number Value" - assert editor_response_data[1]["publication_year"] == 2013 - assert editor_response_data[1]["publisher"] == "Admin Publisher" - assert editor_response_data[1]["relation_type"] == "Admin Relation Type" - assert editor_response_data[1]["titles"][0]["title"] == "Admin Title" - assert editor_response_data[1]["titles"][0]["type"] == "AlternativeTitle" - assert editor_response_data[1]["type"] == "Admin Type" - assert editor_response_data[1]["volume"] == "Admin Volume" - assert editor_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" - assert ( - editor_response_data[2]["contributors"][0]["contributor_type"] - == "Editor Con Type" - ) - assert editor_response_data[2]["contributors"][0]["name_type"] == "Personal" - assert editor_response_data[2]["creators"][0]["name"] == "Editor Name" - assert editor_response_data[2]["creators"][0]["name_type"] == "Personal" - assert editor_response_data[2]["edition"] == "Editor Edition" - assert editor_response_data[2]["first_page"] == "Editor First Page" - assert ( - editor_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" - ) - assert ( - editor_response_data[2]["identifiers"][0]["metadata_scheme"] - == "Editor Metadata Scheme" - ) - assert ( - editor_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" - ) - assert ( - editor_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" - ) - assert editor_response_data[2]["identifiers"][0]["type"] == 
"ARK" - assert editor_response_data[2]["issue"] == "Editor Issue" - assert editor_response_data[2]["last_page"] == "Editor Last Page" - assert editor_response_data[2]["number_type"] == "Editor Number Type" - assert editor_response_data[2]["number_value"] == "Editor Number Value" - assert editor_response_data[2]["publication_year"] == 2013 - assert editor_response_data[2]["publisher"] == "Editor Publisher" - assert editor_response_data[2]["relation_type"] == "Editor Relation Type" - assert editor_response_data[2]["titles"][0]["title"] == "Editor Title" - assert editor_response_data[2]["titles"][0]["type"] == "Subtitle" - assert editor_response_data[2]["type"] == "Editor Type" - assert editor_response_data[2]["volume"] == "Editor Volume" - - assert viewer_response_data[0]["contributors"][0]["name"] == "Ndafsdame" - assert viewer_response_data[0]["contributors"][0]["contributor_type"] == "Con Type" - assert viewer_response_data[0]["contributors"][0]["name_type"] == "Personal" - assert viewer_response_data[0]["creators"][0]["name"] == "Name" - assert viewer_response_data[0]["creators"][0]["name_type"] == "Personal" - assert viewer_response_data[0]["edition"] == "Edition" - assert viewer_response_data[0]["first_page"] == "First Page" - assert viewer_response_data[0]["identifiers"][0]["identifier"] == "Identifier" - assert ( - viewer_response_data[0]["identifiers"][0]["metadata_scheme"] - == "Metadata Scheme" - ) - assert viewer_response_data[0]["identifiers"][0]["scheme_type"] == "Scheme Type" - assert viewer_response_data[0]["identifiers"][0]["scheme_uri"] == "Scheme URI" - assert viewer_response_data[0]["identifiers"][0]["type"] == "ARK" - assert viewer_response_data[0]["issue"] == "Issue" - assert viewer_response_data[0]["last_page"] == "Last Page" - assert viewer_response_data[0]["number_type"] == "Number Type" - assert viewer_response_data[0]["number_value"] == "Number Value" - assert viewer_response_data[0]["publication_year"] == 2013 - assert 
viewer_response_data[0]["publisher"] == "Publisher" - assert viewer_response_data[0]["relation_type"] == "Relation Type" - assert viewer_response_data[0]["titles"][v_main_title_0]["title"] == "Title" - assert viewer_response_data[0]["titles"][v_main_title_0]["type"] == "MainTitle" - assert viewer_response_data[0]["titles"][v_sub_title_0]["title"] == "Title" - assert viewer_response_data[0]["titles"][v_sub_title_0]["type"] == "Subtitle" - assert viewer_response_data[0]["type"] == "Type" - assert viewer_response_data[0]["volume"] == "Volume" - assert viewer_response_data[1]["contributors"][0]["name"] == "Admin Ndafsdame" - assert ( - viewer_response_data[1]["contributors"][0]["contributor_type"] - == "Admin Con Type" - ) - assert viewer_response_data[1]["contributors"][0]["name_type"] == "Personal" - assert viewer_response_data[1]["creators"][0]["name"] == "Admin Name" - assert viewer_response_data[1]["creators"][0]["name_type"] == "Personal" - assert viewer_response_data[1]["edition"] == "Admin Edition" - assert viewer_response_data[1]["first_page"] == "Admin First Page" - assert viewer_response_data[1]["identifiers"][0]["identifier"] == "Admin Identifier" - assert ( - viewer_response_data[1]["identifiers"][0]["metadata_scheme"] - == "Admin Metadata Scheme" - ) - assert ( - viewer_response_data[1]["identifiers"][0]["scheme_type"] == "Admin Scheme Type" - ) - assert viewer_response_data[1]["identifiers"][0]["scheme_uri"] == "Admin Scheme URI" - assert viewer_response_data[1]["identifiers"][0]["type"] == "ARK" - assert viewer_response_data[1]["issue"] == "Admin Issue" - assert viewer_response_data[1]["last_page"] == "Admin Last Page" - assert viewer_response_data[1]["number_type"] == "Admin Number Type" - assert viewer_response_data[1]["number_value"] == "Admin Number Value" - assert viewer_response_data[1]["publication_year"] == 2013 - assert viewer_response_data[1]["publisher"] == "Admin Publisher" - assert viewer_response_data[1]["relation_type"] == "Admin Relation 
Type" - assert viewer_response_data[1]["titles"][0]["title"] == "Admin Title" - assert viewer_response_data[1]["titles"][0]["type"] == "AlternativeTitle" - assert viewer_response_data[1]["type"] == "Admin Type" - assert viewer_response_data[1]["volume"] == "Admin Volume" - assert viewer_response_data[2]["contributors"][0]["name"] == "Editor Ndafsdame" - assert ( - viewer_response_data[2]["contributors"][0]["contributor_type"] - == "Editor Con Type" - ) - assert viewer_response_data[2]["contributors"][0]["name_type"] == "Personal" - assert viewer_response_data[2]["creators"][0]["name"] == "Editor Name" - assert viewer_response_data[2]["creators"][0]["name_type"] == "Personal" - assert viewer_response_data[2]["edition"] == "Editor Edition" - assert viewer_response_data[2]["first_page"] == "Editor First Page" - assert ( - viewer_response_data[2]["identifiers"][0]["identifier"] == "Editor Identifier" - ) - assert ( - viewer_response_data[2]["identifiers"][0]["metadata_scheme"] - == "Editor Metadata Scheme" - ) - assert ( - viewer_response_data[2]["identifiers"][0]["scheme_type"] == "Editor Scheme Type" - ) - assert ( - viewer_response_data[2]["identifiers"][0]["scheme_uri"] == "Editor Scheme URI" - ) - assert viewer_response_data[2]["identifiers"][0]["type"] == "ARK" - assert viewer_response_data[2]["issue"] == "Editor Issue" - assert viewer_response_data[2]["last_page"] == "Editor Last Page" - assert viewer_response_data[2]["number_type"] == "Editor Number Type" - assert viewer_response_data[2]["number_value"] == "Editor Number Value" - assert viewer_response_data[2]["publication_year"] == 2013 - assert viewer_response_data[2]["publisher"] == "Editor Publisher" - assert viewer_response_data[2]["relation_type"] == "Editor Relation Type" - assert viewer_response_data[2]["titles"][0]["title"] == "Editor Title" - assert viewer_response_data[2]["titles"][0]["type"] == "Subtitle" - assert viewer_response_data[2]["type"] == "Editor Type" - assert 
viewer_response_data[2]["volume"] == "Editor Volume" - - -def test_delete_dataset_related_item_contributor_metadata(clients): + # assert len(response_data) == 3 + # assert len(admin_response_data) == 3 + # assert len(editor_response_data) == 3 + # assert len(viewer_response_data) == 3 + print(len(response_data), "lennnnnnnnnn") + assert response_data[0]["identifier"] == "test identifier" + assert response_data[0]["identifier_type"] == "test identifier type" + assert response_data[0]["relation_type"] == "test relation type" + assert response_data[0]["related_metadata_scheme"] == "test" + assert response_data[0]["scheme_uri"] == "test" + assert response_data[0]["scheme_type"] == "test" + assert response_data[0]["resource_type"] == "test" + assert response_data[1]["identifier"] == "admin test identifier" + assert response_data[1]["identifier_type"] == "test identifier type" + assert response_data[1]["relation_type"] == "test relation type" + assert response_data[1]["related_metadata_scheme"] == "test" + assert response_data[1]["scheme_uri"] == "test" + assert response_data[1]["scheme_type"] == "test" + assert response_data[1]["resource_type"] == "test" + assert response_data[2]["identifier"] == "editor test identifier" + assert response_data[2]["identifier_type"] == "test identifier type" + assert response_data[2]["relation_type"] == "test relation type" + assert response_data[2]["related_metadata_scheme"] == "test" + assert response_data[2]["scheme_uri"] == "test" + assert response_data[2]["scheme_type"] == "test" + assert response_data[2]["resource_type"] == "test" + + assert admin_response_data[0]["identifier"] == "test identifier" + assert admin_response_data[0]["identifier_type"] == "test identifier type" + assert admin_response_data[0]["relation_type"] == "test relation type" + assert admin_response_data[0]["related_metadata_scheme"] == "test" + assert admin_response_data[0]["scheme_uri"] == "test" + assert admin_response_data[0]["scheme_type"] == "test" + 
assert admin_response_data[0]["resource_type"] == "test" + assert admin_response_data[1]["identifier"] == "admin test identifier" + assert admin_response_data[1]["identifier_type"] == "test identifier type" + assert admin_response_data[1]["relation_type"] == "test relation type" + assert admin_response_data[1]["related_metadata_scheme"] == "test" + assert admin_response_data[1]["scheme_uri"] == "test" + assert admin_response_data[1]["scheme_type"] == "test" + assert admin_response_data[1]["resource_type"] == "test" + assert admin_response_data[2]["identifier"] == "editor test identifier" + assert admin_response_data[2]["identifier_type"] == "test identifier type" + assert admin_response_data[2]["relation_type"] == "test relation type" + assert admin_response_data[2]["related_metadata_scheme"] == "test" + assert admin_response_data[2]["scheme_uri"] == "test" + assert admin_response_data[2]["scheme_type"] == "test" + assert admin_response_data[2]["resource_type"] == "test" + + assert editor_response_data[0]["identifier"] == "test identifier" + assert editor_response_data[0]["identifier_type"] == "test identifier type" + assert editor_response_data[0]["relation_type"] == "test relation type" + assert editor_response_data[0]["related_metadata_scheme"] == "test" + assert editor_response_data[0]["scheme_uri"] == "test" + assert editor_response_data[0]["scheme_type"] == "test" + assert editor_response_data[0]["resource_type"] == "test" + assert editor_response_data[1]["identifier"] == "admin test identifier" + assert editor_response_data[1]["identifier_type"] == "test identifier type" + assert editor_response_data[1]["relation_type"] == "test relation type" + assert editor_response_data[1]["related_metadata_scheme"] == "test" + assert editor_response_data[1]["scheme_uri"] == "test" + assert editor_response_data[1]["scheme_type"] == "test" + assert editor_response_data[1]["resource_type"] == "test" + assert editor_response_data[2]["identifier"] == "editor test identifier" 
+ assert editor_response_data[2]["identifier_type"] == "test identifier type" + assert editor_response_data[2]["relation_type"] == "test relation type" + assert editor_response_data[2]["related_metadata_scheme"] == "test" + assert editor_response_data[2]["scheme_uri"] == "test" + assert editor_response_data[2]["scheme_type"] == "test" + assert editor_response_data[2]["resource_type"] == "test" + + assert viewer_response_data[0]["identifier"] == "test identifier" + assert viewer_response_data[0]["identifier_type"] == "test identifier type" + assert viewer_response_data[0]["relation_type"] == "test relation type" + assert viewer_response_data[0]["related_metadata_scheme"] == "test" + assert viewer_response_data[0]["scheme_uri"] == "test" + assert viewer_response_data[0]["scheme_type"] == "test" + assert viewer_response_data[0]["resource_type"] == "test" + assert viewer_response_data[1]["identifier"] == "admin test identifier" + assert viewer_response_data[1]["identifier_type"] == "test identifier type" + assert viewer_response_data[1]["relation_type"] == "test relation type" + assert viewer_response_data[1]["related_metadata_scheme"] == "test" + assert viewer_response_data[1]["scheme_uri"] == "test" + assert viewer_response_data[1]["scheme_type"] == "test" + assert viewer_response_data[1]["resource_type"] == "test" + assert viewer_response_data[2]["identifier"] == "editor test identifier" + assert viewer_response_data[2]["identifier_type"] == "test identifier type" + assert viewer_response_data[2]["relation_type"] == "test relation type" + assert viewer_response_data[2]["related_metadata_scheme"] == "test" + assert viewer_response_data[2]["scheme_uri"] == "test" + assert viewer_response_data[2]["scheme_type"] == "test" + assert viewer_response_data[2]["resource_type"] == "test" + + +def test_delete_dataset_related_identifier_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}' - endpoint 
is requested (DELETE) - Then check that the response is valid and deletes the dataset - related item metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - related_item_id = pytest.global_dataset_related_item_id - contributor_id = pytest.global_dataset_related_item_contributor_id - admin_con_id = pytest.global_dataset_related_item_contributor_id_admin - editor_con_id = pytest.global_dataset_related_item_contributor_id_editor - - # pylint: disable=line-too-long - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{contributor_id}" - ) - # pylint: disable=line-too-long - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{contributor_id}" - ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{admin_con_id}" - ) - # pylint: disable=line-too-long - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/contributor/{editor_con_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -def test_delete_dataset_related_item_creator_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - related item metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = 
pytest.global_dataset_id - related_item_id = pytest.global_dataset_related_item_id - creator_id = pytest.global_dataset_related_item_creator_id - admin_creator_id = pytest.global_dataset_related_item_creator_id_admin - editor_creator_id = pytest.global_dataset_related_item_creator_id_editor - - # pylint: disable=line-too-long - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{creator_id}" - ) - # pylint: disable=line-too-long - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{creator_id}" - ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{admin_creator_id}" - ) - # pylint: disable=line-too-long - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/creator/{editor_creator_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -def test_delete_dataset_related_item_identifier_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - related item metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - related_item_id = pytest.global_dataset_related_item_id - identifier_id = pytest.global_dataset_related_item_identifier_id - admin_id_id = pytest.global_dataset_related_item_identifier_id_admin - editor_id_id = pytest.global_dataset_related_item_identifier_id_editor - - # pylint: 
disable=line-too-long - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{identifier_id}" - ) - # pylint: disable=line-too-long - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{identifier_id}" - ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{admin_id_id}" - ) - # pylint: disable=line-too-long - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/identifier/{editor_id_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -def test_delete_dataset_related_item_title_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}' + When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - related item metadata content + Then check that the response is valid and retrieves the dataset + related identifier metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - related_item_id = pytest.global_dataset_related_item_id - main_title_id = pytest.global_dataset_related_item_main_title_id - sub_title_id = pytest.global_dataset_related_item_sub_title_id - admin_t_id = pytest.global_dataset_related_item_title_id_admin - editor_t_id = pytest.global_dataset_related_item_title_id_editor - # pylint: disable=line-too-long - viewer_response = _viewer_client.delete( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{main_title_id}" - ) - main_response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{main_title_id}" - ) - sub_response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{sub_title_id}" - ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{admin_t_id}" - ) - # pylint: disable=line-too-long - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}/title/{editor_t_id}" - ) - - assert viewer_response.status_code == 403 - assert main_response.status_code == 403 # Main title cannot be deleted - assert sub_response.status_code == 204 # Main title cannot be deleted - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -def test_delete_dataset_related_item_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - related item metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - related_item_id = pytest.global_dataset_related_item_id - admin_ri_id = pytest.global_dataset_related_item_id_admin - editor_ri_id = pytest.global_dataset_related_item_id_editor + identifier_id = pytest.global_dataset_related_identifier_id + a_identifier_id = pytest.global_dataset_related_identifier_id_admin + e_identifier_id = pytest.global_dataset_related_identifier_id_editor viewer_response = _viewer_client.delete( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" ) response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{related_item_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" ) admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{admin_ri_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{a_identifier_id}" ) editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item/{editor_ri_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{e_identifier_id}" ) assert viewer_response.status_code == 403 diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index 7851bdb7..d15c0f36 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -702,46 +702,20 @@ def test_get_version_dataset_metadata(clients): } ], ) - related_item_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-item", + related_identifier_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", json=[ { - "contributors": [ - { - "name": "Ndafsdame", - "contributor_type": "Con Type", - "name_type": "Personal", - } - ], - "creators": [{"name": "Name", "name_type": "Personal"}], - "edition": "Edition", - "first_page": "First Page", - "identifiers": [ - { - "identifier": "Identifier", - "metadata_scheme": "Metadata Scheme", - "scheme_type": "Scheme Type", - "scheme_uri": "Scheme URI", - "type": "ARK", - } - ], - "issue": "Issue", - "last_page": "Last Page", - "number_type": "Number Type", - "number_value": "Number Value", - "publication_year": 2013, - "publisher": "Publisher", - 
"relation_type": "Relation Type", - "titles": [ - {"title": "Title", "type": "MainTitle"}, - {"title": "Title", "type": "Subtitle"}, - ], - "type": "Type", - "volume": "Volume", + "identifier": "editor test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test" } ], ) - assert contributor_response.status_code == 201 assert creator_response.status_code == 201 assert date_response.status_code == 201 @@ -749,7 +723,7 @@ def test_get_version_dataset_metadata(clients): assert rights_response.status_code == 201 assert subject_response.status_code == 201 assert alt_identifier_response.status_code == 201 - assert related_item_response.status_code == 201 + assert related_identifier_response.status_code == 201 response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/dataset-metadata" @@ -774,120 +748,52 @@ def test_get_version_dataset_metadata(clients): # seach for main title index in response_data[n]["titles"] # pylint: disable=line-too-long - main_title_0 = next( - ( - index - for (index, d) in enumerate(response_data["related_items"][0]["titles"]) - if d["type"] == "MainTitle" - ), - None, - ) - # seach for subtitle index in response_data["related_items"][0]["titles"] - sub_title_0 = next( - ( - index - for (index, d) in enumerate(response_data["related_items"][0]["titles"]) - if d["type"] == "Subtitle" - ), - None, - ) - a_main_title_0 = next( - ( - index - for (index, d) in enumerate( - admin_response_data["related_items"][0]["titles"] - ) - if d["type"] == "MainTitle" - ), - None, - ) - a_sub_title_0 = next( - ( - index - for (index, d) in enumerate( - admin_response_data["related_items"][0]["titles"] - ) - if d["type"] == "Subtitle" - ), - None, - ) - e_main_title_0 = next( - ( - index - for (index, d) in enumerate( - editor_response_data["related_items"][0]["titles"] - ) - if 
d["type"] == "MainTitle" - ), - None, - ) - e_sub_title_0 = next( - ( - index - for (index, d) in enumerate( - editor_response_data["related_items"][0]["titles"] - ) - if d["type"] == "Subtitle" - ), - None, - ) assert response_data["contributors"][0]["given_name"] == "Given Name here" assert response_data["contributors"][0]["family_name"] == "Family Name here" assert response_data["contributors"][0]["name_type"] == "Personal" assert response_data["contributors"][0]["contributor_type"] == "Con Type" + assert response_data["dates"][0]["date"] == "01-01-1970" assert response_data["dates"][0]["type"] == "Type" + assert response_data["creators"][0]["given_name"] == "Given Name here" assert response_data["creators"][0]["family_name"] == "Family Name here" assert response_data["creators"][0]["name_type"] == "Personal" + assert response_data["funders"][0]["name"] == "Name" assert response_data["funders"][0]["identifier"] == "Identifier" + assert response_data["rights"][0]["identifier"] == "Identifier" assert response_data["rights"][0]["rights"] == "Rights" + assert response_data["subjects"][0]["subject"] == "Subject" + assert response_data["about"]["language"] == "English" - assert response_data["about"]["resource_type"] == "Editor Resource Type" + assert response_data["about"]["resource_type"] == "Resource Type" assert response_data["about"]["size"] == ["Size"] + assert response_data["access"]["type"] == "editor type" assert response_data["access"]["description"] == "editor description" + assert response_data["consent"]["noncommercial"] is True assert response_data["consent"]["geog_restrict"] is True assert response_data["consent"]["research_type"] is True + assert response_data["de_identification"]["direct"] is True assert response_data["de_identification"]["type"] == "Level" - assert response_data["publisher"]["publisher"] == "Publisher" + assert ( - response_data["publisher"]["managing_organization_name"] + 
response_data["managing_organization"]["managing_organization_name"] == "Managing Editor Organization Name" ) - assert response_data["identifiers"][0]["identifier"] == "identifier test" assert response_data["identifiers"][0]["type"] == "ARK" - assert response_data["related_items"][0]["publication_year"] == "1970" - assert response_data["related_items"][0]["publisher"] == "Publisher" - assert response_data["related_items"][0]["contributors"][0]["name"] == "Ndafsdame" - assert ( - response_data["related_items"][0]["contributors"][0]["contributor_type"] - == "Con Type" - ) - assert response_data["related_items"][0]["creators"][0]["name"] == "Name" - assert response_data["related_items"][0]["creators"][0]["name_type"] == "Personal" - assert response_data["related_items"][0]["titles"][main_title_0]["title"] == "Title" - assert ( - response_data["related_items"][0]["titles"][main_title_0]["type"] == "MainTitle" - ) - assert response_data["related_items"][0]["titles"][sub_title_0]["title"] == "Title" - assert ( - response_data["related_items"][0]["titles"][sub_title_0]["type"] == "Subtitle" - ) - assert ( - response_data["related_items"][0]["identifiers"][0]["identifier"] - == "Identifier" - ) - assert response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" - assert response_data["related_items"][0]["type"] == "Type" + + assert response_data["related_identifier"][0]["identifier"] == "editor test identifier" + assert response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert response_data["related_identifier"][0]["resource_type"] == "test" assert admin_response_data["contributors"][0]["given_name"] == "Given Name here" assert admin_response_data["contributors"][0]["family_name"] == "Family Name here" @@ -905,7 +811,7 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["subjects"][0]["subject"] == "Subject" assert admin_response_data["about"]["language"] == "English" - assert 
admin_response_data["about"]["resource_type"] == "Editor Resource Type" + assert admin_response_data["about"]["resource_type"] == "Resource Type" assert admin_response_data["about"]["size"] == ["Size"] assert admin_response_data["access"]["type"] == "editor type" assert admin_response_data["access"]["description"] == "editor description" @@ -914,51 +820,17 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["consent"]["research_type"] is True assert admin_response_data["de_identification"]["direct"] is True assert admin_response_data["de_identification"]["type"] == "Level" - assert admin_response_data["publisher"]["publisher"] == "Publisher" assert ( - admin_response_data["publisher"]["managing_organization_name"] + admin_response_data["managing_organization"]["managing_organization_name"] == "Managing Editor Organization Name" ) assert admin_response_data["identifiers"][0]["identifier"] == "identifier test" assert admin_response_data["identifiers"][0]["type"] == "ARK" - assert admin_response_data["related_items"][0]["publication_year"] == "1970" - assert admin_response_data["related_items"][0]["publisher"] == "Publisher" - assert ( - admin_response_data["related_items"][0]["contributors"][0]["name"] - == "Ndafsdame" - ) - assert ( - admin_response_data["related_items"][0]["contributors"][0]["contributor_type"] - == "Con Type" - ) - assert admin_response_data["related_items"][0]["creators"][0]["name"] == "Name" - assert ( - admin_response_data["related_items"][0]["creators"][0]["name_type"] - == "Personal" - ) - assert ( - admin_response_data["related_items"][0]["titles"][a_main_title_0]["title"] - == "Title" - ) - assert ( - admin_response_data["related_items"][0]["titles"][a_main_title_0]["type"] - == "MainTitle" - ) - assert ( - admin_response_data["related_items"][0]["titles"][a_sub_title_0]["title"] - == "Title" - ) - assert ( - admin_response_data["related_items"][0]["titles"][a_sub_title_0]["type"] - == "Subtitle" - ) - assert ( - 
admin_response_data["related_items"][0]["identifiers"][0]["identifier"] - == "Identifier" - ) - assert admin_response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" - assert admin_response_data["related_items"][0]["type"] == "Type" + + assert admin_response_data["related_identifier"][0]["identifier"] == "editor test identifier" + assert admin_response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert admin_response_data["related_identifier"][0]["resource_type"] == "test" assert editor_response_data["contributors"][0]["family_name"] == "Family Name here" assert editor_response_data["contributors"][0]["given_name"] == "Given Name here" @@ -976,7 +848,7 @@ def test_get_version_dataset_metadata(clients): assert editor_response_data["subjects"][0]["subject"] == "Subject" assert editor_response_data["about"]["language"] == "English" - assert editor_response_data["about"]["resource_type"] == "Editor Resource Type" + assert editor_response_data["about"]["resource_type"] == "Resource Type" assert editor_response_data["about"]["size"] == ["Size"] assert editor_response_data["access"]["type"] == "editor type" assert editor_response_data["access"]["description"] == "editor description" @@ -985,51 +857,17 @@ def test_get_version_dataset_metadata(clients): assert editor_response_data["consent"]["research_type"] is True assert editor_response_data["de_identification"]["direct"] is True assert editor_response_data["de_identification"]["type"] == "Level" - assert editor_response_data["publisher"]["publisher"] == "Publisher" assert ( - editor_response_data["publisher"]["managing_organization_name"] + editor_response_data["managing_organization"]["managing_organization_name"] == "Managing Editor Organization Name" ) assert editor_response_data["identifiers"][0]["identifier"] == "identifier test" assert editor_response_data["identifiers"][0]["type"] == "ARK" - assert editor_response_data["related_items"][0]["publication_year"] == "1970" - 
assert editor_response_data["related_items"][0]["publisher"] == "Publisher" - assert ( - editor_response_data["related_items"][0]["contributors"][0]["name"] - == "Ndafsdame" - ) - assert ( - editor_response_data["related_items"][0]["contributors"][0]["contributor_type"] - == "Con Type" - ) - assert editor_response_data["related_items"][0]["creators"][0]["name"] == "Name" - assert ( - editor_response_data["related_items"][0]["creators"][0]["name_type"] - == "Personal" - ) - assert ( - editor_response_data["related_items"][0]["titles"][e_main_title_0]["title"] - == "Title" - ) - assert ( - editor_response_data["related_items"][0]["titles"][e_main_title_0]["type"] - == "MainTitle" - ) - assert ( - editor_response_data["related_items"][0]["titles"][e_sub_title_0]["title"] - == "Title" - ) - assert ( - editor_response_data["related_items"][0]["titles"][e_sub_title_0]["type"] - == "Subtitle" - ) - assert ( - editor_response_data["related_items"][0]["identifiers"][0]["identifier"] - == "Identifier" - ) - assert editor_response_data["related_items"][0]["identifiers"][0]["type"] == "ARK" - assert editor_response_data["related_items"][0]["type"] == "Type" + + assert editor_response_data["related_identifier"][0]["identifier"] == "editor test identifier" + assert editor_response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert editor_response_data["related_identifier"][0]["resource_type"] == "test" def test_get_version_readme(clients): From 3d9538030071817387a09daaee4be85584c9de27 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Wed, 6 Mar 2024 13:09:54 -0800 Subject: [PATCH 446/505] =?UTF-8?q?feat:=20=E2=9C=A8=20update=20dashboard?= =?UTF-8?q?=20config=20for=20new=20modules?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/config/aireadi_config.py | 1634 ++++++++++++++++++++------ 1 file changed, 1270 insertions(+), 364 deletions(-) diff --git a/modules/etl/config/aireadi_config.py 
b/modules/etl/config/aireadi_config.py index a23e0a92..375fd863 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -294,19 +294,19 @@ # Visualization Transforms # -# Overview -instrumentCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Survey Completions +surveyCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "compoundTransform", { - "key": "instrument-completion-status-by-site", + "key": "survey-completion-status-by-site", "strict": True, "transforms": [ { - "name": "Recruitment Survey", + "name": "Demographics Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "recruitment_survey_complete"], + "groups": ["siteid", "demographics_survey_complete"], "value": "record_id", "func": "count", } @@ -320,14 +320,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", + "name": "Demographics Survey", + "field": "demographics_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", + "name": "Demographics Survey", + "field": "demographics_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -340,11 +340,11 @@ }, }, { - "name": "FAQ Survey", + "name": "Health Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "faq_survey_complete"], + "groups": ["siteid", "health_survey_complete"], "value": "record_id", "func": "count", } @@ -358,14 +358,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "FAQ Survey", - "field": "faq_survey_complete", + "name": "Health Survey", + "field": "health_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "FAQ Survey", - "field": "faq_survey_complete", + "name": "Health Survey", + "field": "health_survey_complete", "missing_value": missing_value_generic, 
"astype": str, }, @@ -378,11 +378,11 @@ }, }, { - "name": "Screening Survey", + "name": "Substance Use Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "screening_survey_complete"], + "groups": ["siteid", "substance_use_survey_complete"], "value": "record_id", "func": "count", } @@ -396,14 +396,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Screening Survey", - "field": "screening_survey_complete", + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Screening Survey", - "field": "screening_survey_complete", + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -416,11 +416,11 @@ }, }, { - "name": "Preconsent Survey", + "name": "CES-D-10 Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "preconsent_survey_complete"], + "groups": ["siteid", "cesd10_survey_complete"], "value": "record_id", "func": "count", } @@ -434,14 +434,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -454,11 +454,11 @@ }, }, { - "name": "Consent Survey", + "name": "PAID-5 DM Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "consent_survey_complete"], + "groups": ["siteid", "paid5_dm_survey_complete"], "value": "record_id", "func": "count", } @@ -472,14 +472,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Consent Survey", - "field": "consent_survey_complete", + "name": "PAID-5 DM Survey", + 
"field": "paid5_dm_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Consent Survey", - "field": "consent_survey_complete", + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -492,14 +492,11 @@ }, }, { - "name": "Staff Consent Attestation Survey", + "name": "Diabetes Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": [ - "siteid", - "staff_consent_attestation_survey_complete", - ], + "groups": ["siteid", "diabetes_survey_complete"], "value": "record_id", "func": "count", } @@ -513,14 +510,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -533,11 +530,11 @@ }, }, { - "name": "Demographics Survey", + "name": "Dietary Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "demographics_survey_complete"], + "groups": ["siteid", "dietary_survey_complete"], "value": "record_id", "func": "count", } @@ -551,14 +548,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", + "name": "Dietary Survey", + "field": "dietary_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", + "name": "Dietary Survey", + "field": "dietary_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -571,11 +568,11 @@ }, }, { - "name": "Health Survey", + "name": 
"Opthalmic Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "health_survey_complete"], + "groups": ["siteid", "ophthalmic_survey_complete"], "value": "record_id", "func": "count", } @@ -589,14 +586,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -609,11 +606,11 @@ }, }, { - "name": "Substance Use Survey", + "name": "PhenX SDOH Combined Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "substance_use_survey_complete"], + "groups": ["siteid", "px_sdoh_combined_survey_complete"], "value": "record_id", "func": "count", } @@ -627,14 +624,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -647,11 +644,11 @@ }, }, { - "name": "CES-D-10 Survey", + "name": "PhenX Food Insecurity Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "cesd10_survey_complete"], + "groups": ["siteid", "px_food_insecurity_survey_complete"], "value": "record_id", "func": "count", } @@ -665,14 +662,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", + "name": "PhenX Food Insecurity Survey", + "field": 
"px_food_insecurity_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -685,11 +682,14 @@ }, }, { - "name": "PAID-5 DM Survey", + "name": "PhenX Neighborhood Environment Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "paid5_dm_survey_complete"], + "groups": [ + "siteid", + "px_neighborhood_environment_survey_complete", + ], "value": "record_id", "func": "count", } @@ -703,14 +703,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -723,11 +723,14 @@ }, }, { - "name": "Diabetes Survey", + "name": "PhenX Racial and Ethnic Discrimination Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "diabetes_survey_complete"], + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], "value": "record_id", "func": "count", } @@ -741,14 +744,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", + "name": "PhenX Racial and Ethnic Discrimination Survey", 
+ "field": "px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -761,11 +764,11 @@ }, }, { - "name": "Dietary Survey", + "name": "Medications Assessment", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "dietary_survey_complete"], + "groups": ["siteid", "meds_assessment_complete"], "value": "record_id", "func": "count", } @@ -779,14 +782,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", + "name": "Medications Assessment", + "field": "meds_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", + "name": "Medications Assessment", + "field": "meds_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -798,12 +801,22 @@ }, }, }, - { - "name": "Opthalmic Survey", + ], + }, +) + +# Recruitment Operations +recruitmentOperationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "compoundTransform", + { + "key": "recruitment-operations-status-by-site", + "strict": True, + "transforms": [{ + "name": "Recruitment Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "ophthalmic_survey_complete"], + "groups": ["siteid", "recruitment_survey_complete"], "value": "record_id", "func": "count", } @@ -817,14 +830,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -837,11 +850,11 @@ }, }, { - "name": "PhenX SDOH Combined Survey", + "name": "FAQ Survey", "vtype": 
"DoubleCategorical", "methods": [ { - "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "groups": ["siteid", "faq_survey_complete"], "value": "record_id", "func": "count", } @@ -855,14 +868,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", + "name": "FAQ Survey", + "field": "faq_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", + "name": "FAQ Survey", + "field": "faq_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -875,11 +888,11 @@ }, }, { - "name": "PhenX Food Insecurity Survey", + "name": "Screening Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "px_food_insecurity_survey_complete"], + "groups": ["siteid", "screening_survey_complete"], "value": "record_id", "func": "count", } @@ -893,14 +906,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", + "name": "Screening Survey", + "field": "screening_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", + "name": "Screening Survey", + "field": "screening_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -913,14 +926,11 @@ }, }, { - "name": "PhenX Neighborhood Environment Survey", + "name": "Preconsent Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": [ - "siteid", - "px_neighborhood_environment_survey_complete", - ], + "groups": ["siteid", "preconsent_survey_complete"], "value": "record_id", "func": "count", } @@ -934,14 +944,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", + 
"name": "Preconsent Survey", + "field": "preconsent_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -954,14 +964,11 @@ }, }, { - "name": "PhenX Racial and Ethnic Discrimination Survey", + "name": "Consent Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": [ - "siteid", - "px_racial_ethnic_discrimination_survey_complete", - ], + "groups": ["siteid", "consent_survey_complete"], "value": "record_id", "func": "count", } @@ -975,14 +982,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", + "name": "Consent Survey", + "field": "consent_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", + "name": "Consent Survey", + "field": "consent_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -995,11 +1002,14 @@ }, }, { - "name": "Decline Participation Survey", + "name": "Staff Consent Attestation Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "decline_participation_survey_complete"], + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], "value": "record_id", "func": "count", } @@ -1013,15 +1023,15 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Decline Participation Survey", - "field": "decline_participation_survey_complete", + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Decline 
Participation Survey", - "field": "decline_participation_survey_complete", - "missing_value": missing_value_generic, + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, "astype": str, }, "value": { @@ -1032,7 +1042,7 @@ }, }, }, - { + { "name": "Study Enrollment Survey", "vtype": "DoubleCategorical", "methods": [ @@ -1147,11 +1157,11 @@ }, }, { - "name": "Medications Assessment", + "name": "Data Management Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "meds_assessment_complete"], + "groups": ["siteid", "data_management_complete"], "value": "record_id", "func": "count", } @@ -1165,14 +1175,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": "meds_assessment_complete", + "name": "Data Management Survey", + "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", + "name": "Data Management Survey", + "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1184,12 +1194,23 @@ }, }, }, + ], + }, +) + +# Recruitment Counts by Site +raceRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-recruitment-by-site", + "strict": True, + "transforms": [ { - "name": "Physical Assessment", - "vtype": "DoubleCategorical", + "name": "Race Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "physical_assessment_complete"], + "groups": ["siteid", "race", "scrdate"], "value": "record_id", "func": "count", } @@ -1202,32 +1223,42 @@ "astype": str, }, "group": { - "remap": lambda x: x["name"], - "name": "Physical Assessment", - "field": "physical_assessment_complete", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, 
- "subgroup": { - "name": "Physical Assessment", - "field": "physical_assessment_complete", + "x": { + "name": "Week of the Year", + "field": "scrdate", "missing_value": missing_value_generic, "astype": str, }, - "value": { - "name": "Count (N)", + "y": { + "name": "Cumulative Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, }, }, + ], + }, +) + +# Recruitment Counts by Site +phenotypeRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-recruitment-by-site", + "strict": True, + "transforms": [ { - "name": "BCVA", - "vtype": "DoubleCategorical", + "name": "Phenotype Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "bcva_complete"], + "groups": ["siteid", "phenotypes", "scrdate"], "value": "record_id", "func": "count", } @@ -1240,32 +1271,42 @@ "astype": str, }, "group": { - "remap": lambda x: x["name"], - "name": "BCVA", - "field": "bcva_complete", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { - "name": "BCVA", - "field": "bcva_complete", + "x": { + "name": "Week of the Year", + "field": "scrdate", "missing_value": missing_value_generic, "astype": str, }, - "value": { - "name": "Count (N)", + "y": { + "name": "Cumulative Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, }, }, + ], + }, +) + +# Race & Sex Counts by Race +raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-sex-by-site", + "strict": True, + "transforms": [ { - "name": "Photopic MARS", + "name": "Race & Sex by Site", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "photopic_mars_complete"], + "groups": ["scrsex", "race", "siteid"], "value": "record_id", "func": "count", } @@ -1275,18 +1316,16 @@ "name": "Site", "field": "siteid", "missing_value": missing_value_generic, - 
"astype": str, }, "group": { - "remap": lambda x: x["name"], - "name": "Photopic MARS", - "field": "photopic_mars_complete", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Photopic MARS", - "field": "photopic_mars_complete", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, @@ -1298,12 +1337,23 @@ }, }, }, + ], + }, +) + +# Phenotype & Sex Counts by Race +phenotypeSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-sex-by-site", + "strict": True, + "transforms": [ { - "name": "Mesopic MARS", + "name": "Phenotype & Sex by Site", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "mesopic_mars_complete"], + "groups": ["scrsex", "phenotypes", "siteid"], "value": "record_id", "func": "count", } @@ -1313,18 +1363,16 @@ "name": "Site", "field": "siteid", "missing_value": missing_value_generic, - "astype": str, }, "group": { - "remap": lambda x: x["name"], - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, @@ -1336,33 +1384,42 @@ }, }, }, + ], + }, +) + +# Phenotype & Site Counts by Sex +phenotypeSiteBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-site-by-sex", + "strict": True, + "transforms": [ { - "name": "Monofilament", + "name": "Phenotype & Site by Sex", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "monofilament_complete"], + "groups": ["scrsex", "phenotypes", "siteid"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Sex", + "field": "scrsex", 
"missing_value": missing_value_generic, - "astype": str, }, "group": { - "remap": lambda x: x["name"], - "name": "Monofilament", - "field": "monofilament_complete", + "name": "Site", + "field": "siteid", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Monofilament", - "field": "monofilament_complete", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, @@ -1374,33 +1431,42 @@ }, }, }, + ], + }, +) + +# Phenotype & Race Counts by Sex +phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-race-by-sex", + "strict": True, + "transforms": [ { - "name": "MOCA", + "name": "Phenotype & Race by Sex", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "moca_complete"], + "groups": ["phenotypes", "race", "scrsex"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, - "astype": str, }, "group": { - "remap": lambda x: x["name"], - "name": "MOCA", - "field": "moca_complete", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "MOCA", - "field": "moca_complete", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, @@ -1412,12 +1478,22 @@ }, }, }, + ], + }, +) + +currentMedicationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "current-medications-by-site", + "strict": True, + "transforms": [ { - "name": "ECG Survey", + "name": "Current Medications by Site", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "ecg_complete"], + "groups": ["siteid", "current_medications", "scrsex"], "value": "record_id", "func": "count", } @@ -1430,32 +1506,42 @@ "astype": str, }, "group": { - "remap": lambda x: x["name"], - 
"name": "ECG Survey", - "field": "ecg_complete", + "name": "Current Medication Count", + "field": "current_medications", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "ECG Survey", - "field": "ecg_complete", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, "value": { - "name": "Count (N)", + "name": "Participants (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, }, - }, + } + ], + }, +) + +# Overview +instrumentCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "compoundTransform", + { + "key": "instrument-completion-status-by-site", + "strict": True, + "transforms": [ { - "name": "Lab Results Survey", + "name": "Recruitment Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "lab_results_complete"], + "groups": ["siteid", "recruitment_survey_complete"], "value": "record_id", "func": "count", } @@ -1469,14 +1555,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Lab Results Survey", - "field": "lab_results_complete", + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Lab Results Survey", - "field": "lab_results_complete", + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1489,11 +1575,11 @@ }, }, { - "name": "Specimen Management", + "name": "FAQ Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "specimen_management_complete"], + "groups": ["siteid", "faq_survey_complete"], "value": "record_id", "func": "count", } @@ -1507,14 +1593,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Specimen Management", - "field": "specimen_management_complete", + "name": "FAQ Survey", + "field": "faq_survey_complete", "missing_value": missing_value_generic, "astype": str, }, 
"subgroup": { - "name": "Specimen Management", - "field": "specimen_management_complete", + "name": "FAQ Survey", + "field": "faq_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1527,11 +1613,11 @@ }, }, { - "name": "Device Return", + "name": "Screening Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "device_return_complete"], + "groups": ["siteid", "screening_survey_complete"], "value": "record_id", "func": "count", } @@ -1545,14 +1631,897 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Device Return", - "field": "device_return_complete", + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Preconsent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Consent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + "func": 
"count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Staff Consent Attestation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + 
"astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": 
["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": 
"Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX SDOH Combined Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "value": 
"record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_neighborhood_environment_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + 
"field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Decline Participation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "decline_participation_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Decline Participation 
Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Study Enrollment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "study_enrollment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Driving Record", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "driving_record_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + 
"value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Medications Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Physical Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "physical_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + 
"name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "BCVA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Photopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Photopic MARS", + "field": "photopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Device Return", - "field": "device_return_complete", + "name": "Photopic MARS", + "field": "photopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1565,11 +2534,11 @@ }, }, { - "name": "Disposition Survey", + "name": "Mesopic MARS", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "disposition_complete"], + "groups": ["siteid", "mesopic_mars_complete"], "value": "record_id", "func": "count", } @@ -1583,14 +2552,14 @@ }, "group": { "remap": 
lambda x: x["name"], - "name": "Disposition Survey", - "field": "disposition_complete", + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Disposition Survey", - "field": "disposition_complete", + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1603,11 +2572,11 @@ }, }, { - "name": "Data Management Survey", + "name": "Monofilament", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "data_management_complete"], + "groups": ["siteid", "monofilament_complete"], "value": "record_id", "func": "count", } @@ -1621,14 +2590,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Data Management Survey", - "field": "data_management_complete", + "name": "Monofilament", + "field": "monofilament_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Data Management Survey", - "field": "data_management_complete", + "name": "Monofilament", + "field": "monofilament_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1640,23 +2609,12 @@ }, }, }, - ], - }, -) - -# Recruitment Counts by Site -raceRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-recruitment-by-site", - "strict": True, - "transforms": [ { - "name": "Race Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", + "name": "MOCA", + "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "race", "scrdate"], + "groups": ["siteid", "moca_complete"], "value": "record_id", "func": "count", } @@ -1669,42 +2627,32 @@ "astype": str, }, "group": { - "name": "Race", - "field": "race", + "remap": lambda x: x["name"], + "name": "MOCA", + "field": "moca_complete", "missing_value": missing_value_generic, "astype": str, }, - "x": { - "name": "Week of the Year", - "field": "scrdate", + "subgroup": { + "name": "MOCA", + 
"field": "moca_complete", "missing_value": missing_value_generic, "astype": str, }, - "y": { - "name": "Cumulative Count (N)", + "value": { + "name": "Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, }, }, - ], - }, -) - -# Recruitment Counts by Site -phenotypeRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-recruitment-by-site", - "strict": True, - "transforms": [ { - "name": "Phenotype Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", + "name": "ECG Survey", + "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "phenotypes", "scrdate"], + "groups": ["siteid", "ecg_complete"], "value": "record_id", "func": "count", } @@ -1717,42 +2665,32 @@ "astype": str, }, "group": { - "name": "Phenotype", - "field": "phenotypes", + "remap": lambda x: x["name"], + "name": "ECG Survey", + "field": "ecg_complete", "missing_value": missing_value_generic, "astype": str, }, - "x": { - "name": "Week of the Year", - "field": "scrdate", + "subgroup": { + "name": "ECG Survey", + "field": "ecg_complete", "missing_value": missing_value_generic, "astype": str, }, - "y": { - "name": "Cumulative Count (N)", + "value": { + "name": "Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, }, }, - ], - }, -) - -# Race & Sex Counts by Race -raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-sex-by-site", - "strict": True, - "transforms": [ { - "name": "Race & Sex by Site", + "name": "Lab Results Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["scrsex", "race", "siteid"], + "groups": ["siteid", "lab_results_complete"], "value": "record_id", "func": "count", } @@ -1762,16 +2700,18 @@ "name": "Site", "field": "siteid", "missing_value": missing_value_generic, + "astype": str, }, "group": { - "name": "Sex", - "field": "scrsex", + "remap": lambda x: x["name"], + "name": 
"Lab Results Survey", + "field": "lab_results_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Race", - "field": "race", + "name": "Lab Results Survey", + "field": "lab_results_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1783,23 +2723,12 @@ }, }, }, - ], - }, -) - -# Phenotype & Sex Counts by Race -phenotypeSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-sex-by-site", - "strict": True, - "transforms": [ { - "name": "Phenotype & Sex by Site", + "name": "Specimen Management", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["scrsex", "phenotypes", "siteid"], + "groups": ["siteid", "specimen_management_complete"], "value": "record_id", "func": "count", } @@ -1809,16 +2738,18 @@ "name": "Site", "field": "siteid", "missing_value": missing_value_generic, + "astype": str, }, "group": { - "name": "Sex", - "field": "scrsex", + "remap": lambda x: x["name"], + "name": "Specimen Management", + "field": "specimen_management_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Phenotype", - "field": "phenotypes", + "name": "Specimen Management", + "field": "specimen_management_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1830,42 +2761,33 @@ }, }, }, - ], - }, -) - -# Phenotype & Site Counts by Sex -phenotypeSiteBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-site-by-sex", - "strict": True, - "transforms": [ { - "name": "Phenotype & Site by Sex", + "name": "Device Return", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["scrsex", "phenotypes", "siteid"], + "groups": ["siteid", "device_return_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Sex", - "field": "scrsex", + "name": "Site", + "field": "siteid", "missing_value": missing_value_generic, + "astype": str, }, "group": 
{ - "name": "Site", - "field": "siteid", + "remap": lambda x: x["name"], + "name": "Device Return", + "field": "device_return_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Phenotype", - "field": "phenotypes", + "name": "Device Return", + "field": "device_return_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1877,42 +2799,33 @@ }, }, }, - ], - }, -) - -# Phenotype & Race Counts by Sex -phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-race-by-sex", - "strict": True, - "transforms": [ { - "name": "Phenotype & Race by Sex", + "name": "Disposition Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["phenotypes", "race", "scrsex"], + "groups": ["siteid", "disposition_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Sex", - "field": "scrsex", + "name": "Site", + "field": "siteid", "missing_value": missing_value_generic, + "astype": str, }, "group": { - "name": "Phenotype", - "field": "phenotypes", + "remap": lambda x: x["name"], + "name": "Disposition Survey", + "field": "disposition_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Race", - "field": "race", + "name": "Disposition Survey", + "field": "disposition_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1924,22 +2837,12 @@ }, }, }, - ], - }, -) - -currentMedicationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "current-medications-by-site", - "strict": True, - "transforms": [ { - "name": "Current Medications by Site", + "name": "Data Management Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "current_medications", "scrsex"], + "groups": ["siteid", "data_management_complete"], "value": "record_id", "func": "count", } @@ -1952,31 +2855,34 @@ "astype": str, }, "group": { - "name": 
"Current Medication Count", - "field": "current_medications", + "remap": lambda x: x["name"], + "name": "Data Management Survey", + "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Sex", - "field": "scrsex", + "name": "Data Management Survey", + "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, "value": { - "name": "Participants (N)", + "name": "Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, }, - } + }, ], }, ) moduleTransformConfigs: Dict[str, Any] = { "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, + "survey-completion-status-by-site": surveyCompletionStatusBySiteTransformConfig, + "recruitment-operations-status-by-site": recruitmentOperationsBySiteTransformConfig, "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, From 13e7737ff1a0fc4e5a8afca5d430a523778e6d5c Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 11 Mar 2024 15:07:47 -0700 Subject: [PATCH 447/505] =?UTF-8?q?=F0=9F=94=A8=20chore:=20add=20table=20r?= =?UTF-8?q?eset?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev/drop_tables.sql | 51 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 dev/drop_tables.sql diff --git a/dev/drop_tables.sql b/dev/drop_tables.sql new file mode 100644 index 00000000..ad8e1b08 --- /dev/null +++ b/dev/drop_tables.sql @@ -0,0 +1,51 @@ +DROP TABLE IF EXISTS study CASCADE; +DROP TABLE IF EXISTS dataset CASCADE; +DROP TABLE IF EXISTS email_verification CASCADE; +DROP TABLE IF EXISTS invited_study_contributor CASCADE; +DROP TABLE IF EXISTS notification CASCADE; +DROP TABLE IF EXISTS participant CASCADE; +DROP TABLE IF EXISTS study_contributor CASCADE; 
+DROP TABLE IF EXISTS study_arm CASCADE; +DROP TABLE IF EXISTS study_available_ipd CASCADE; +DROP TABLE IF EXISTS study_contact CASCADE; +DROP TABLE IF EXISTS study_dashboard CASCADE; +DROP TABLE IF EXISTS study_description CASCADE; +DROP TABLE IF EXISTS study_design CASCADE; +DROP TABLE IF EXISTS study_eligibility CASCADE; +DROP TABLE IF EXISTS study_identification CASCADE; +DROP TABLE IF EXISTS study_intervention CASCADE; +DROP TABLE IF EXISTS study_ipdsharing CASCADE; +DROP TABLE IF EXISTS study_link CASCADE; +DROP TABLE IF EXISTS study_location CASCADE; +DROP TABLE IF EXISTS study_other CASCADE; +DROP TABLE IF EXISTS study_overall_official CASCADE; +DROP TABLE IF EXISTS study_reference CASCADE; +DROP TABLE IF EXISTS study_sponsors_collaborators CASCADE; +DROP TABLE IF EXISTS study_status CASCADE; +DROP TABLE IF EXISTS study_redcap CASCADE; +DROP TABLE IF EXISTS token_blacklist CASCADE; +DROP TABLE IF EXISTS user_details CASCADE; +DROP TABLE IF EXISTS dataset_contributor CASCADE; +DROP TABLE IF EXISTS dataset_related_item CASCADE; +DROP TABLE IF EXISTS dataset_access CASCADE; +DROP TABLE IF EXISTS dataset_alternate_identifier CASCADE; +DROP TABLE IF EXISTS dataset_consent CASCADE; +DROP TABLE IF EXISTS dataset_date CASCADE; +DROP TABLE IF EXISTS dataset_de_ident_level CASCADE; +DROP TABLE IF EXISTS dataset_description CASCADE; +DROP TABLE IF EXISTS dataset_funder CASCADE; +DROP TABLE IF EXISTS dataset_healthsheet CASCADE; +DROP TABLE IF EXISTS dataset_other CASCADE; +DROP TABLE IF EXISTS dataset_record_keys CASCADE; +DROP TABLE IF EXISTS dataset_related_item_contributor CASCADE; +DROP TABLE IF EXISTS dataset_related_item_identifier CASCADE; +DROP TABLE IF EXISTS dataset_related_item_other CASCADE; +DROP TABLE IF EXISTS dataset_related_item_title CASCADE; +DROP TABLE IF EXISTS dataset_rights CASCADE; +DROP TABLE IF EXISTS dataset_subject CASCADE; +DROP TABLE IF EXISTS dataset_title CASCADE; +DROP TABLE IF EXISTS version CASCADE; +DROP TABLE IF EXISTS 
version_participants CASCADE; +DROP TABLE IF EXISTS version_readme CASCADE; +DROP TABLE IF EXISTS published_dataset CASCADE; +DROP TABLE IF EXISTS alembic_version CASCADE; \ No newline at end of file From fa2307ba43eda7f16862d240a803f424bcef187d Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Mon, 11 Mar 2024 15:07:57 -0700 Subject: [PATCH 448/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20study=20metadata?= =?UTF-8?q?=20changes=20(#50)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: dataset rights add scheme uri * feat: add study_location list table * fix: location list relation * feat: added study metadata tables * style: format * fix: study design added is_patient_registry col * fix: added new columns to existing tables * fix: remove tables * fix: added column to study table * style: format * fix: study * fix: add new endpoints * fix: sponsor and collab endpoints * fix: central contact * fix: sponsors * fix: collaborators * fix: endpoints * fix: condition and keyword schema * style: format * fix: test dataset metadata * fix: finalize study metadata endpoints * fix: collabs test * fix: test keywords and oversight test * fix: finalized study metadata test * fix: dataset metasdata test * fix: study version metadata test * style: format * fix: test * fix: study acronym * fix: study metadata * fix: study design --- apis/__init__.py | 31 +- .../dataset_managing_organization.py | 7 +- .../dataset_related_identifier.py | 20 +- apis/dataset_metadata/dataset_rights.py | 2 + apis/study.py | 20 +- apis/study_metadata/study_available_ipd.py | 127 - apis/study_metadata/study_central_contact.py | 161 ++ apis/study_metadata/study_collaborators.py | 109 + apis/study_metadata/study_conditions.py | 107 + apis/study_metadata/study_contact.py | 144 - apis/study_metadata/study_design.py | 3 +- apis/study_metadata/study_eligibility.py | 6 +- apis/study_metadata/study_ipdsharing.py | 106 - 
apis/study_metadata/study_keywords.py | 107 + apis/study_metadata/study_link.py | 104 - apis/study_metadata/study_other.py | 404 ++- apis/study_metadata/study_overall_official.py | 29 +- apis/study_metadata/study_oversight.py | 76 + apis/study_metadata/study_reference.py | 106 - apis/study_metadata/study_sponsors.py | 164 ++ .../study_sponsors_collaborators.py | 172 -- model/__init__.py | 26 +- model/dataset_metadata/dataset_rights.py | 6 + model/study.py | 83 +- model/study_metadata/study_available_ipd.py | 65 - model/study_metadata/study_central_contact.py | 103 + ...dy_reference.py => study_collaborators.py} | 35 +- model/study_metadata/study_conditions.py | 73 + model/study_metadata/study_contact.py | 82 - model/study_metadata/study_design.py | 8 +- model/study_metadata/study_eligibility.py | 10 +- model/study_metadata/study_ipdsharing.py | 77 - model/study_metadata/study_keywords.py | 73 + model/study_metadata/study_link.py | 64 - model/study_metadata/study_location.py | 5 + .../study_location_contact_list.py | 89 + model/study_metadata/study_other.py | 28 +- .../study_metadata/study_overall_official.py | 43 +- model/study_metadata/study_oversight.py | 59 + model/study_metadata/study_sponsors.py | 147 + .../study_sponsors_collaborators.py | 96 - tests/conftest.py | 10 + tests/functional/test_study_api.py | 14 + .../test_study_dataset_metadata_api.py | 131 +- tests/functional/test_study_metadata_api.py | 2472 ++++++++++------- tests/functional/test_study_version_api.py | 411 +-- 46 files changed, 3509 insertions(+), 2706 deletions(-) delete mode 100644 apis/study_metadata/study_available_ipd.py create mode 100644 apis/study_metadata/study_central_contact.py create mode 100644 apis/study_metadata/study_collaborators.py create mode 100644 apis/study_metadata/study_conditions.py delete mode 100644 apis/study_metadata/study_contact.py delete mode 100644 apis/study_metadata/study_ipdsharing.py create mode 100644 apis/study_metadata/study_keywords.py delete mode 
100644 apis/study_metadata/study_link.py create mode 100644 apis/study_metadata/study_oversight.py delete mode 100644 apis/study_metadata/study_reference.py create mode 100644 apis/study_metadata/study_sponsors.py delete mode 100644 apis/study_metadata/study_sponsors_collaborators.py delete mode 100644 model/study_metadata/study_available_ipd.py create mode 100644 model/study_metadata/study_central_contact.py rename model/study_metadata/{study_reference.py => study_collaborators.py} (60%) create mode 100644 model/study_metadata/study_conditions.py delete mode 100644 model/study_metadata/study_contact.py delete mode 100644 model/study_metadata/study_ipdsharing.py create mode 100644 model/study_metadata/study_keywords.py delete mode 100644 model/study_metadata/study_link.py create mode 100644 model/study_metadata/study_location_contact_list.py create mode 100644 model/study_metadata/study_oversight.py create mode 100644 model/study_metadata/study_sponsors.py delete mode 100644 model/study_metadata/study_sponsors_collaborators.py diff --git a/apis/__init__.py b/apis/__init__.py index 652959de..c58a7559 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -18,8 +18,8 @@ from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_funder import api as funder from .dataset_metadata.dataset_healthsheet import api as healthsheet -from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_managing_organization import api as managing_organization +from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_related_identifier import api as related_identifier from .dataset_metadata.dataset_rights import api as rights from .dataset_metadata.dataset_subject import api as subject @@ -29,20 +29,21 @@ from .redcap import api as redcap from .study import api as study_api from .study_metadata.study_arm import api as arm -from .study_metadata.study_available_ipd import 
api as available_ipd -from .study_metadata.study_contact import api as contact +from .study_metadata.study_central_contact import api as central_contact +from .study_metadata.study_collaborators import api as collaborators +from .study_metadata.study_conditions import api as conditions from .study_metadata.study_description import api as study_description from .study_metadata.study_design import api as design from .study_metadata.study_eligibility import api as eligibility from .study_metadata.study_identification import api as identification from .study_metadata.study_intervention import api as intervention -from .study_metadata.study_ipdsharing import api as ipdsharing -from .study_metadata.study_link import api as link + +# from .study_metadata.study_other import api as other +from .study_metadata.study_keywords import api as keywords from .study_metadata.study_location import api as location -from .study_metadata.study_other import api as other from .study_metadata.study_overall_official import api as overall_official -from .study_metadata.study_reference import api as reference -from .study_metadata.study_sponsors_collaborators import api as sponsors_collaborator +from .study_metadata.study_oversight import api as oversight +from .study_metadata.study_sponsors import api as sponsors from .study_metadata.study_status import api as status from .user import api as user from .utils import api as utils @@ -77,18 +78,18 @@ "participants_api", "study_api", "arm", - "available_ipd", - "contact", + "central_contact", "design", "eligibility", "intervention", - "ipdsharing", - "link", "location", - "other", + # "other", + "keywords", + "conditions", + "oversight", "overall_official", - "reference", - "sponsors_collaborator", + "sponsors", + "collaborators", "status", "user", "identification", diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index c04ce441..9319426e 100644 --- 
a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -8,7 +8,6 @@ from apis.authentication import is_granted from apis.dataset_metadata_namespace import api - dataset_managing_organization = api.model( "DatasetManagingOrganization", { @@ -51,12 +50,8 @@ def put(self, study_id: int, dataset_id: int): "managing_organization_ror_id": { "type": "string", }, - }, - "required": [ - "managing_organization_name", - "managing_organization_ror_id" - ], + "required": ["managing_organization_name", "managing_organization_ror_id"], } try: validate(instance=request.json, schema=schema) diff --git a/apis/dataset_metadata/dataset_related_identifier.py b/apis/dataset_metadata/dataset_related_identifier.py index b4b76888..3e2ce088 100644 --- a/apis/dataset_metadata/dataset_related_identifier.py +++ b/apis/dataset_metadata/dataset_related_identifier.py @@ -88,13 +88,17 @@ def post(self, study_id: int, dataset_id: int): list_of_elements = [] for i in data: if "id" in i and i["id"]: - dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get(i["id"]) + dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get( + i["id"] + ) if not dataset_related_identifier_: return f"{i['id']} Id is not found", 404 dataset_related_identifier_.update(i) list_of_elements.append(dataset_related_identifier_.to_dict()) elif "id" not in i or not i["id"]: - dataset_related_identifier_ = model.DatasetRelatedIdentifier.from_data(data_obj, i) + dataset_related_identifier_ = model.DatasetRelatedIdentifier.from_data( + data_obj, i + ) model.db.session.add(dataset_related_identifier_) list_of_elements.append(dataset_related_identifier_.to_dict()) model.db.session.commit() @@ -111,16 +115,18 @@ class DatasetRelatedIdentifierUpdate(Resource): @api.response(204, "Success") @api.response(400, "Validation Error") def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - related_identifier_id: 
int, + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + related_identifier_id: int, ): """Delete dataset related identifier""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 - dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get(related_identifier_id) + dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get( + related_identifier_id + ) model.db.session.delete(dataset_related_identifier_) model.db.session.commit() diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 86ddd2ac..404558fc 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -18,6 +18,7 @@ "uri": fields.String(required=True), "identifier": fields.String(required=True), "identifier_scheme": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), "license_text": fields.String(required=True), }, ) @@ -58,6 +59,7 @@ def post(self, study_id: int, dataset_id: int): "id": {"type": "string"}, "identifier": {"type": "string"}, "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, "rights": {"type": "string", "minLength": 1}, "uri": {"type": "string"}, "license_text": {"type": "string"}, diff --git a/apis/study.py b/apis/study.py index 931b0151..c20ad6a7 100644 --- a/apis/study.py +++ b/apis/study.py @@ -62,11 +62,12 @@ def post(self): # Schema validation schema = { "type": "object", - "required": ["title", "image"], + "required": ["title", "image", "acronym"], "additionalProperties": False, "properties": { - "title": {"type": "string", "minLength": 1}, - "image": {"type": "string", "minLength": 1}, + "title": {"type": "string", "minLength": 1, "maxLength": 300}, + "acronym": {"type": "string", "maxLength": 14}, + "image": {"type": "string"}, }, } @@ -114,11 +115,12 @@ 
def put(self, study_id: int): # Schema validation schema = { "type": "object", - "required": ["title", "image"], + "required": ["title", "image", "acronym"], "additionalProperties": False, "properties": { "title": {"type": "string", "minLength": 1}, "image": {"type": "string", "minLength": 1}, + "acronym": {"type": "string", "minLength": 1, "maxLength": 14}, }, } @@ -147,16 +149,6 @@ def delete(self, study_id: int): if not is_granted("delete_study", study): return "Access denied, you can not delete study", 403 - # for d in study.dataset: - # for version in d.dataset_versions: - # version.participants.clear() - # for d in study.dataset: - # for version in d.dataset_versions: - # model.db.session.delete(version) - # model.db.session.delete(d) - # for p in study.participants: - # model.db.session.delete(p) - model.db.session.delete(study) model.db.session.commit() diff --git a/apis/study_metadata/study_available_ipd.py b/apis/study_metadata/study_available_ipd.py deleted file mode 100644 index 7e12f5ba..00000000 --- a/apis/study_metadata/study_available_ipd.py +++ /dev/null @@ -1,127 +0,0 @@ -"""API routes for study available ipd metadata""" - -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_available = api.model( - "StudyAvailable", - { - "id": fields.String(required=True), - "identifier": fields.String(required=True), - "type": fields.String(required=True), - "comment": fields.String(required=True), - "url": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/available-ipd") -class StudyAvailableResource(Resource): - """Study Available Metadata""" - - @api.doc("available") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_available) - def get(self, study_id: int): - """Get study available 
metadata""" - study_ = model.Study.query.get(study_id) - - study_available_ipd_ = study_.study_available_ipd - - sorted_study_available_ipd = sorted( - study_available_ipd_, key=lambda x: x.created_at - ) - - return [s.to_dict() for s in sorted_study_available_ipd], 200 - - @api.doc( - description="An array of objects are expected within the payload with the keys demonstrated below to create an available-ipd" # noqa E501 - ) - @api.response(201, "Success") - @api.response(400, "Validation Error") - @api.expect(study_available) - def post(self, study_id: int): - """Create study available metadata""" - # Schema validation - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "identifier": {"type": "string", "minLength": 1}, - "type": { - "type": "string", - "enum": [ - "Individual Participant Data Set", - "Study Protocol", - "Statistical Analysis Plan", - "Informated Consent Form", - "Clinical Study Report", - "Analytic Code", - "Other", - ], - }, - "comment": {"type": "string"}, - "url": {"type": "string", "format": "uri", "minLength": 1}, - }, - "required": ["identifier", "type", "url", "comment"], - }, - "uniqueItems": True, - } - - try: - validate(request.json, schema) - except ValidationError as e: - print(e.message) - return e.message, 400 - - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not modify study", 403 - data: typing.Union[dict, typing.Any] = request.json - study_obj = model.Study.query.get(study_id) - - list_of_elements = [] - - for i in data: - if "id" in i and i["id"]: - study_available_ipd_ = model.StudyAvailableIpd.query.get(i["id"]) - study_available_ipd_.update(i) - else: - study_available_ipd_ = model.StudyAvailableIpd.from_data(study_obj, i) - model.db.session.add(study_available_ipd_) - list_of_elements.append(study_available_ipd_.to_dict()) - model.db.session.commit() - - return 
list_of_elements, 201 - - -@api.route("/study//metadata/available-ipd/") -class StudyLocationUpdate(Resource): - """Study Available Metadata""" - - @api.doc("delete available-ipd") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, available_ipd_id: int): - """Delete study available metadata""" - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 - study_available_ = model.StudyAvailableIpd.query.get(available_ipd_id) - - model.db.session.delete(study_available_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_central_contact.py b/apis/study_metadata/study_central_contact.py new file mode 100644 index 00000000..a3865e94 --- /dev/null +++ b/apis/study_metadata/study_central_contact.py @@ -0,0 +1,161 @@ +"""API routes for study contact metadata""" + +import typing + +from email_validator import EmailNotValidError, validate_email +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import FormatChecker, ValidationError, validate + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_contact = api.model( + "StudyCentralContact", + { + "id": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "degree": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_scheme": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + "affiliation": fields.String(required=True), + "affiliation_identifier": fields.String(required=True), + "affiliation_identifier_scheme": fields.String(required=True), + "affiliation_identifier_scheme_uri": fields.String(required=True), + "phone": fields.String(required=True), + "phone_ext": 
fields.String(required=True), + "email_address": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/central-contact") +class StudyCentralContactResource(Resource): + """Study Contact Metadata""" + + @api.doc("contact") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(study_contact) + def get(self, study_id: int): + """Get study contact metadata""" + study_ = model.Study.query.get(study_id) + + study_central_contact_ = study_.study_central_contact + # sorted_study_contact = sorted(study_central_contact_, key=lambda x: x.created_at) + # return [s.to_dict() for s in sorted_study_contact if s.central_contact], 200 + + return [s.to_dict() for s in study_central_contact_], 200 + + @api.response(201, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int): + """Create study contact metadata""" + + def validate_is_valid_email(instance): + email_address = instance + + try: + validate_email(email_address) + return True + except EmailNotValidError as e: + raise ValidationError("Invalid email address format") from e + + # Schema validation + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "required": [ + "first_name", + "last_name", + "affiliation", + "phone", + "phone_ext", + "email_address", + ], + "properties": { + "id": {"type": "string"}, + "first_name": {"type": "string", "minLength": 1}, + "last_name": {"type": "string", "minLength": 1}, + "degree": {"type": "string", "minLength": 1}, + "identifier": {"type": "string", "minLength": 1}, + "identifier_scheme": {"type": "string", "minLength": 1}, + "identifier_scheme_uri": {"type": "string", "minLength": 1}, + "affiliation": {"type": "string", "minLength": 1}, + "affiliation_identifier": { + "type": "string", + "minLength": 1, + }, + "affiliation_identifier_scheme": { + "type": "string", + }, + "affiliation_identifier_scheme_uri": {"type": "string"}, + "phone": {"type": "string"}, + 
"phone_ext": {"type": "string"}, + "email_address": {"type": "string"}, + # "email_address": {"type": "string", "format": "email"}, + }, + }, + "uniqueItems": True, + } + + format_checker = FormatChecker() + format_checker.checks("email")(validate_is_valid_email) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + return e.message, 400 + + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not modify study", 403 + data: typing.Union[dict, typing.Any] = request.json + + study_obj = model.Study.query.get(study_id) + + list_of_elements = [] + + for i in data: + if "id" in i and i["id"]: + study_central_contact_ = model.StudyCentralContact.query.get(i["id"]) + study_central_contact_.update(i) + list_of_elements.append(study_central_contact_.to_dict()) + elif "id" not in i or not i["id"]: + study_central_contact_ = model.StudyCentralContact.from_data( + study_obj, i + ) + model.db.session.add(study_central_contact_) + list_of_elements.append(study_central_contact_.to_dict()) + + model.db.session.commit() + + return list_of_elements, 201 + + +@api.route("/study//metadata/central-contact/") +class StudyCentralContactDelete(Resource): + """Study Central Contact Metadata""" + + @api.doc("Delete Study contacts") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, central_contact_id: int): + """Delete study central contact metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + study_central_contact_ = model.StudyCentralContact.query.get(central_contact_id) + + model.db.session.delete(study_central_contact_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/study_metadata/study_collaborators.py b/apis/study_metadata/study_collaborators.py new file mode 
100644 index 00000000..63840079 --- /dev/null +++ b/apis/study_metadata/study_collaborators.py @@ -0,0 +1,109 @@ +"""API routes for study collaborators metadata""" + +import typing + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_collaborators = api.model( + "StudyCollaborators", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/collaborators") +class StudyCollaboratorsResource(Resource): + """Study Collaborators Metadata""" + + @api.doc("collaborators") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(study_collaborators) + def get(self, study_id: int): + """Get study collaborators metadata""" + study_ = model.Study.query.get(study_id) + study_collaborators_ = study_.study_collaborators + + return [collab.to_dict() for collab in study_collaborators_], 200 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int): + """updating study collaborators""" + # Schema validation + schema = { + "type": "array", + "additionalProperties": False, + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + }, + "required": [ + "name", + "identifier", + "identifier_scheme", + "identifier_scheme_uri", + ], + }, + } + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[dict, typing.Any] = request.json + + study_obj = model.Study.query.get(study_id) + if 
not is_granted("study_metadata", study_obj): + return "Access denied, you can not modify study", 403 + + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + study_collaborators_ = model.StudyCollaborators.query.get(i["id"]) + study_collaborators_.update(i) + else: + study_collaborators_ = model.StudyCollaborators.from_data(study_obj, i) + model.db.session.add(study_collaborators_) + list_of_elements.append(study_collaborators_.to_dict()) + model.db.session.commit() + + return list_of_elements, 201 + + +@api.route("/study//metadata/collaborators/") +class StudyLocationUpdate(Resource): + """delete Study Collaborators Metadata""" + + @api.doc("delete study collaborators") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, collaborator_id: int): + """Delete study collaborators metadata""" + study_obj = model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + study_collaborators_ = model.StudyCollaborators.query.get(collaborator_id) + + model.db.session.delete(study_collaborators_) + + model.db.session.commit() + + return Response(status=204) diff --git a/apis/study_metadata/study_conditions.py b/apis/study_metadata/study_conditions.py new file mode 100644 index 00000000..d0762ab1 --- /dev/null +++ b/apis/study_metadata/study_conditions.py @@ -0,0 +1,107 @@ +"""API routes for study other metadata""" + +import typing + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_other = api.model( + "StudyConditions", + { + "id": fields.String(required=True), + "name": fields.Boolean(required=True), + "classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": 
fields.String(required=True), + "condition_uri": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/conditions") +class StudyCondition(Resource): + """Study Conditions Metadata""" + + @api.doc("conditions") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(study_other) + def get(self, study_id: int): + """Get study conditions metadata""" + study_ = model.Study.query.get(study_id) + + study_conditions = study_.study_conditions + + return [s.to_dict() for s in study_conditions], 200 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int): + """Create study condition metadata""" + # Schema validation + schema = { + "type": "array", + "additionalProperties": False, + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "classification_code": {"type": "string", "minLength": 1}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "condition_uri": {"type": "string"}, + }, + "required": ["name", "classification_code", "condition_uri"], + }, + } + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + study_obj = model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not modify study", 403 + + data: typing.Union[dict, typing.Any] = request.json + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + study_conditions_ = model.StudyConditions.query.get(i["id"]) + if not study_conditions_: + return f"Study condition {i['id']} Id is not found", 404 + study_conditions_.update(i) + list_of_elements.append(study_conditions_.to_dict()) + elif "id" not in i or not i["id"]: + study_conditions_ = model.StudyConditions.from_data(study_obj, i) + model.db.session.add(study_conditions_) + list_of_elements.append(study_conditions_.to_dict()) + model.db.session.commit() + 
return list_of_elements, 201 + + +@api.route("/study//metadata/conditions/") +class StudyConditionsUpdate(Resource): + """Study Conditions Metadata update""" + + @api.doc("Delete Study Identifications") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, condition_id: int): + """Delete study conditions metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + + study_conditions_ = model.StudyConditions.query.get(condition_id) + + model.db.session.delete(study_conditions_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/study_metadata/study_contact.py b/apis/study_metadata/study_contact.py deleted file mode 100644 index 8c02108c..00000000 --- a/apis/study_metadata/study_contact.py +++ /dev/null @@ -1,144 +0,0 @@ -"""API routes for study contact metadata""" - -import typing - -from email_validator import EmailNotValidError, validate_email -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import FormatChecker, ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_contact = api.model( - "StudyContact", - { - "id": fields.String(required=True), - "name": fields.String(required=True), - "affiliation": fields.String(required=True), - "role": fields.String(required=True), - "phone": fields.String(required=True), - "phone_ext": fields.String(required=True), - "email_address": fields.String(required=True), - "central_contact": fields.Boolean(required=True), - }, -) - - -@api.route("/study//metadata/central-contact") -class StudyContactResource(Resource): - """Study Contact Metadata""" - - @api.doc("contact") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(study_contact) - def get(self, study_id: int): - """Get study 
contact metadata""" - study_ = model.Study.query.get(study_id) - - study_contact_ = study_.study_contact - - sorted_study_contact = sorted(study_contact_, key=lambda x: x.created_at) - - return [s.to_dict() for s in sorted_study_contact if s.central_contact], 200 - - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """Create study contact metadata""" - - def validate_is_valid_email(instance): - email_address = instance - - try: - validate_email(email_address) - return True - except EmailNotValidError as e: - raise ValidationError("Invalid email address format") from e - - # Schema validation - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "required": [ - "name", - "affiliation", - "phone", - "phone_ext", - "email_address", - ], - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "affiliation": {"type": "string", "minLength": 1}, - "role": {"type": "string", "minLength": 1}, - "phone": { - "type": "string", - "minLength": 1, - }, - "phone_ext": { - "type": "string", - }, - "email_address": {"type": "string", "format": "email"}, - "central_contact": {"type": "boolean"}, - }, - }, - "uniqueItems": True, - } - - format_checker = FormatChecker() - format_checker.checks("email")(validate_is_valid_email) - - try: - validate( - instance=request.json, schema=schema, format_checker=format_checker - ) - except ValidationError as e: - return e.message, 400 - - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not modify study", 403 - data: typing.Union[dict, typing.Any] = request.json - - study_obj = model.Study.query.get(study_id) - - list_of_elements = [] - - for i in data: - if "id" in i and i["id"]: - study_contact_ = model.StudyContact.query.get(i["id"]) - study_contact_.update(i) - list_of_elements.append(study_contact_.to_dict()) - elif "id" not in i or 
not i["id"]: - study_contact_ = model.StudyContact.from_data(study_obj, i, None, True) - model.db.session.add(study_contact_) - list_of_elements.append(study_contact_.to_dict()) - - model.db.session.commit() - - return list_of_elements, 201 - - @api.route("/study//metadata/central-contact/") - class StudyContactUpdate(Resource): - """Study Contact Metadata""" - - @api.doc("Delete Study contacts") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, central_contact_id: int): - """Delete study contact metadata""" - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - study_contact_ = model.StudyContact.query.get(central_contact_id) - - model.db.session.delete(study_contact_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 3d3cf0be..8a8fb0f8 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -32,6 +32,7 @@ "bio_spec_description": fields.String(required=True), "target_duration": fields.String(required=True), "number_groups_cohorts": fields.Integer(required=True), + "is_patient_registry": fields.String(required=True), }, ) @@ -160,6 +161,7 @@ def put(self, study_id: int): "bio_spec_description": {"type": ["string", "null"]}, "target_duration": {"type": ["string", "null"]}, "number_groups_cohorts": {"type": ["integer", "null"]}, + "is_patient_registry": {"type": ["string", "null"]}, }, } @@ -201,7 +203,6 @@ def put(self, study_id: int): "enrollment_count", "enrollment_type", "target_duration", - "number_groups_cohorts", ] for field in required_fields: diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index ee4d6af0..7d2bebf7 100644 --- a/apis/study_metadata/study_eligibility.py +++ b/apis/study_metadata/study_eligibility.py @@ -13,7 
+13,7 @@ "StudyEligibility", { "id": fields.String(required=True), - "gender": fields.String(required=True), + "sex": fields.String(required=True), "gender_based": fields.String(required=True), "gender_description": fields.String(required=True), "minimum_age_value": fields.Integer(required=True), @@ -54,13 +54,13 @@ def put(self, study_id: int): "type": "object", "additionalProperties": False, "required": [ - "gender", + "sex", "gender_based", "minimum_age_value", "maximum_age_value", ], "properties": { - "gender": {"type": "string", "enum": ["All", "Female", "Male"]}, + "sex": {"type": "string", "enum": ["All", "Female", "Male"]}, "gender_based": {"type": "string", "enum": ["Yes", "No"]}, "gender_description": {"type": "string"}, "minimum_age_value": {"type": "integer"}, diff --git a/apis/study_metadata/study_ipdsharing.py b/apis/study_metadata/study_ipdsharing.py deleted file mode 100644 index 27b7e166..00000000 --- a/apis/study_metadata/study_ipdsharing.py +++ /dev/null @@ -1,106 +0,0 @@ -"""API routes for study ipdsharing metadata""" - -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_ipdsharing = api.model( - "StudyIpdsharing", - { - "id": fields.String(required=True), - "ipd_sharing": fields.String(required=True), - "ipd_sharing_description": fields.String(required=True), - "ipd_sharing_info_type_list": fields.List(fields.String, required=True), - "ipd_sharing_time_frame": fields.String(required=True), - "ipd_sharing_access_criteria": fields.String(required=True), - "ipd_sharing_url": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/ipdsharing") -class StudyIpdsharingResource(Resource): - """Study Ipd sharing Metadata""" - - @api.doc("ipdsharing") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", 
"The study identifier") - @api.marshal_with(study_ipdsharing) - def get(self, study_id: int): - """Get study ipdsharing metadata""" - study_ = model.Study.query.get(study_id) - - return study_.study_ipdsharing.to_dict(), 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int): - """Create study ipdsharing metadata""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "ipd_sharing": {"type": "string", "enum": ["Yes", "No", "Undecided"]}, - "ipd_sharing_description": {"type": "string"}, - "ipd_sharing_info_type_list": { - "type": "array", - "items": { - "type": "string", - "enum": [ - "Study Protocol", - "Statistical Analysis Plan (SAP)", - "Informed Consent Form (ICF)", - "Clinical Study Report (CSR)", - "Analytical Code", - ], - }, - "uniqueItems": True, - }, - "ipd_sharing_time_frame": {"type": "string"}, - "ipd_sharing_access_criteria": {"type": "string"}, - "ipd_sharing_url": {"type": "string", "format": "uri"}, - }, - "required": [ - "ipd_sharing", - "ipd_sharing_description", - "ipd_sharing_info_type_list", - "ipd_sharing_time_frame", - "ipd_sharing_access_criteria", - "ipd_sharing_url", - ], - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - if data["ipd_sharing"] == "Yes": - required_fields = [ - "ipd_sharing_description", - "ipd_sharing_info_type_list", - "ipd_sharing_time_frame", - "ipd_sharing_access_criteria", - "ipd_sharing_url", - ] - - for field in required_fields: - if field not in data: - return f"Field {field} is required", 400 - - study_ = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_): - return "Access denied, you can not modify study", 403 - study_.study_ipdsharing.update(request.json) - model.db.session.commit() - return study_.study_ipdsharing.to_dict(), 200 diff --git 
a/apis/study_metadata/study_keywords.py b/apis/study_metadata/study_keywords.py new file mode 100644 index 00000000..33bcb27d --- /dev/null +++ b/apis/study_metadata/study_keywords.py @@ -0,0 +1,107 @@ +"""API routes for study other metadata""" + +import typing + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_keywords = api.model( + "StudyKeywords", + { + "id": fields.String(required=True), + "name": fields.Boolean(required=True), + "classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "keyword_uri": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/keywords") +class StudyKeywords(Resource): + """Study Keywords Metadata""" + + @api.doc("keywords") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(study_other) + def get(self, study_id: int): + """Get study keywords metadata""" + study_ = model.Study.query.get(study_id) + study_keywords = study_.study_keywords + + return [k.to_dict() for k in study_keywords], 200 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int): + """Create study keywords metadata""" + # Schema validation + schema = { + "type": "array", + "additionalProperties": False, + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "classification_code": {"type": "string", "minLength": 1}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "keyword_uri": {"type": "string"}, + }, + "required": ["name", "classification_code", "keyword_uri"], + }, + } + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + study_obj = 
model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not modify study", 403 + + data: typing.Union[dict, typing.Any] = request.json + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + study_keywords_ = model.StudyKeywords.query.get(i["id"]) + if not study_keywords_: + return f"Study keywords {i['id']} Id is not found", 404 + study_keywords_.update(i) + list_of_elements.append(study_keywords_.to_dict()) + elif "id" not in i or not i["id"]: + study_keywords_ = model.StudyKeywords.from_data(study_obj, i) + model.db.session.add(study_keywords_) + list_of_elements.append(study_keywords_.to_dict()) + model.db.session.commit() + return list_of_elements, 201 + + +@api.route("/study//metadata/keywords/") +class StudyKeywordsDelete(Resource): + """Study keywords Metadata update""" + + @api.doc("Delete Study Keywords") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, keyword_id: int): + """Delete study conditions metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + + study_keywords_ = model.StudyKeywords.query.get(keyword_id) + + model.db.session.delete(study_keywords_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/study_metadata/study_link.py b/apis/study_metadata/study_link.py deleted file mode 100644 index 78cbf417..00000000 --- a/apis/study_metadata/study_link.py +++ /dev/null @@ -1,104 +0,0 @@ -"""API routes for study link metadata""" - -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_link = api.model( - "StudyLink", - { - "id": fields.String(required=True), - "url": 
fields.String(required=True), - "title": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/link") -class StudyLinkResource(Resource): - """Study Link Metadata""" - - @api.doc("link") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") - @api.marshal_with(study_link) - def get(self, study_id: int): - """Get study link metadata""" - study_ = model.Study.query.get(study_id) - study_link_ = study_.study_link - sorted_study_link_ = sorted(study_link_, key=lambda x: x.created_at) - return [s.to_dict() for s in sorted_study_link_], 200 - - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """Create study link metadata""" - # Schema validation - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "url": {"type": "string", "format": "uri", "minLength": 1}, - "title": {"type": "string"}, - }, - "required": ["url", "title"], - }, - "uniqueItems": True, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - data: typing.Union[dict, typing.Any] = request.json - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_link_ = model.StudyLink.query.get(i["id"]) - if study_link_ is None: - return f"Study link {i['id']} Id is not found", 404 - study_link_.update(i) - - else: - study_link_ = model.StudyLink.from_data(study_obj, i) - model.db.session.add(study_link_) - - list_of_elements.append(study_link_.to_dict()) - model.db.session.commit() - - return list_of_elements, 201 - - @api.route("/study//metadata/link/") - class StudyLinkUpdate(Resource): - """Study Link Metadata""" - - @api.doc("Delete study links") - @api.response(204, 
"Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, link_id: int): - """Delete study link metadata""" - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 - study_link_ = model.StudyLink.query.get(link_id) - - model.db.session.delete(study_link_) - - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py index 46696303..5355004d 100644 --- a/apis/study_metadata/study_other.py +++ b/apis/study_metadata/study_other.py @@ -1,156 +1,256 @@ -"""API routes for study other metadata""" +# """API routes for study other metadata""" +# +# import typing +# +# from flask import request, Response +# from flask_restx import Resource, fields +# from jsonschema import ValidationError, validate +# +# import model +# from apis.study_metadata_namespace import api +# +# from ..authentication import is_granted +# +# study_other = api.model( +# "StudyOther", +# { +# "id": fields.String(required=True), +# "oversight_has_dmc": fields.Boolean(required=True), +# "conditions": fields.String(required=True), +# "keywords": fields.String(required=True), +# "size": fields.String(required=True), +# }, +# ) +# +# +# @api.route("/study//metadata/oversight") +# class StudyOversightResource(Resource): +# """Study Oversight Metadata""" +# +# @api.doc("oversight") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(study_other) +# def get(self, study_id: int): +# """Get study oversight metadata""" +# study_ = model.Study.query.get(study_id) +# +# study_oversight_has_dmc = study_.study_oversight +# return study_oversight_has_dmc.to_dict(), 200 +# +# def put(self, study_id: int): +# """Update study oversight metadata""" +# # Schema validation +# schema = { +# "type": "object", +# "additionalProperties": False, +# "properties": 
{"oversight_has_dmc": {"type": "boolean"}}, +# "required": ["has_dmc"], +# } +# +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 +# +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not modify study", 403 +# data: typing.Union[dict, typing.Any] = request.json +# study_oversight_ = study_obj.study_oversight.update(data) +# model.db.session.commit() +# return study_obj.study_oversight.to_dict(), 200 -import typing -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +# @api.route("/study//metadata/conditions") +# class StudyCondition(Resource): +# """Study Conditions Metadata""" +# +# @api.doc("conditions") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(study_other) +# def get(self, study_id: int): +# """Get study conditions metadata""" +# study_ = model.Study.query.get(study_id) +# +# study_conditions = study_.study_conditions +# +# return [s.to_dict() for s in study_conditions], 200 +# +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def post(self, study_id: int): +# """Create study condition metadata""" +# # Schema validation +# # schema = { +# # "type": "array", +# # "additionalProperties": False, +# # "items": { +# # "type": "object", +# # "properties": { +# # "id": {"type": "string"}, +# # "facility": {"type": "string", "minLength": 1}, +# # "status": { +# # "type": "string", +# # "enum": [ +# # "Withdrawn", +# # "Recruiting", +# # "Active, not recruiting", +# # "Not yet recruiting", +# # "Suspended", +# # "Enrolling by invitation", +# # "Completed", +# # "Terminated", +# # ], +# # }, +# # "city": {"type": "string", "minLength": 1}, +# # "state": {"type": "string"}, +# # "zip": {"type": "string"}, +# # "country": {"type": "string", "minLength": 1}, +# # }, +# # "required": ["facility", 
"status", "city", "country"], +# # }, +# # } +# # +# # try: +# # validate(request.json, schema) +# # except ValidationError as e: +# # return e.message, 400 +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not modify study", 403 +# +# data: typing.Union[dict, typing.Any] = request.json +# list_of_elements = [] +# for i in data: +# if "id" in i and i["id"]: +# study_conditions_ = model.StudyConditions.query.get(i["id"]) +# if not study_conditions_: +# return f"Study condition {i['id']} Id is not found", 404 +# study_conditions_.update(i) +# list_of_elements.append(study_conditions_.to_dict()) +# elif "id" not in i or not i["id"]: +# study_conditions_ = model.StudyConditions.from_data(study_obj, i) +# model.db.session.add(study_conditions_) +# list_of_elements.append(study_conditions_.to_dict()) +# model.db.session.commit() +# return list_of_elements, 201 +# +# +# @api.route("/study//metadata/conditions/") +# class StudyConditionsUpdate(Resource): +# """Study Conditions Metadata update""" +# +# @api.doc("Delete Study Identifications") +# @api.response(204, "Success") +# @api.response(400, "Validation Error") +# def delete(self, study_id: int, condition_id: int): +# """Delete study conditions metadata""" +# study = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study): +# return "Access denied, you can not delete study", 403 +# +# study_conditions_ = model.StudyConditions.query.get(condition_id) +# +# model.db.session.delete(study_conditions_) +# model.db.session.commit() +# +# return Response(status=204) -import model -from apis.study_metadata_namespace import api -from ..authentication import is_granted - -study_other = api.model( - "StudyOther", - { - "id": fields.String(required=True), - "oversight_has_dmc": fields.Boolean(required=True), - "conditions": fields.String(required=True), - "keywords": fields.String(required=True), - "size": 
fields.String(required=True), - }, -) - - -@api.route("/study//metadata/oversight") -class StudyOversightResource(Resource): - """Study Oversight Metadata""" - - @api.doc("oversight") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study oversight metadata""" - study_ = model.Study.query.get(study_id) - - study_oversight_has_dmc = study_.study_other.oversight_has_dmc - return {"oversight": study_oversight_has_dmc}, 200 - - def put(self, study_id: int): - """Update study oversight metadata""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "properties": {"oversight_has_dmc": {"type": "boolean"}}, - "required": ["oversight_has_dmc"], - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - data: typing.Union[dict, typing.Any] = request.json - study_oversight = study_obj.study_other.oversight_has_dmc = data[ - "oversight_has_dmc" - ] - study_obj.touch() - model.db.session.commit() - - return study_oversight, 200 - - -@api.route("/study//metadata/conditions") -class StudyCondition(Resource): - """Study Conditions Metadata""" - - @api.doc("conditions") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study conditions metadata""" - study_ = model.Study.query.get(study_id) - - study_other_conditions = study_.study_other.conditions - - return study_other_conditions, 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int): - """Update study conditions metadata""" - # Schema validation - schema = { - "type": "array", - "items": {"type": "string", "minLength": 1}, - "minItems": 1, - 
"uniqueItems": True, - "additionalItems": False, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - study_obj.study_other.conditions = data - study_obj.touch() - model.db.session.commit() - - return study_obj.study_other.conditions, 200 - - -@api.route("/study//metadata/keywords") -class StudyKeywords(Resource): - """Study Keywords Metadata""" - - @api.doc("keywords") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study keywords metadata""" - study_ = model.Study.query.get(study_id) - - study_other_keywords = study_.study_other.keywords - - return study_other_keywords, 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int): - """Update study keywords metadata""" - # Schema validation - schema = { - "type": "array", - "items": {"type": "string", "minLength": 1}, - "minItems": 1, - "uniqueItems": True, - "additionalItems": False, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - study_obj.study_other.keywords = data - study_obj.touch() - model.db.session.commit() - - return study_obj.study_other.keywords, 200 +# @api.route("/study//metadata/keywords") +# class StudyKeywords(Resource): +# """Study Keywords Metadata""" +# +# @api.doc("keywords") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(study_other) +# def get(self, study_id: int): +# """Get study 
keywords metadata""" +# study_ = model.Study.query.get(study_id) +# +# study_keywords = study_.study_keywords +# +# return [k.to_dict() for k in study_keywords], 200 +# +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def post(self, study_id: int): +# """Create study condition metadata""" +# # Schema validation +# # schema = { +# # "type": "array", +# # "additionalProperties": False, +# # "items": { +# # "type": "object", +# # "properties": { +# # "id": {"type": "string"}, +# # "facility": {"type": "string", "minLength": 1}, +# # "status": { +# # "type": "string", +# # "enum": [ +# # "Withdrawn", +# # "Recruiting", +# # "Active, not recruiting", +# # "Not yet recruiting", +# # "Suspended", +# # "Enrolling by invitation", +# # "Completed", +# # "Terminated", +# # ], +# # }, +# # "city": {"type": "string", "minLength": 1}, +# # "state": {"type": "string"}, +# # "zip": {"type": "string"}, +# # "country": {"type": "string", "minLength": 1}, +# # }, +# # "required": ["facility", "status", "city", "country"], +# # }, +# # } +# # +# # try: +# # validate(request.json, schema) +# # except ValidationError as e: +# # return e.message, 400 +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not modify study", 403 +# +# data: typing.Union[dict, typing.Any] = request.json +# list_of_elements = [] +# for i in data: +# if "id" in i and i["id"]: +# study_keywords_ = model.StudyKeywords.query.get(i["id"]) +# if not study_keywords_: +# return f"Study keywords {i['id']} Id is not found", 404 +# study_keywords_.update(i) +# list_of_elements.append(study_keywords_.to_dict()) +# elif "id" not in i or not i["id"]: +# study_keywords_ = model.StudyKeywords.from_data(study_obj, i) +# model.db.session.add(study_keywords_) +# list_of_elements.append(study_keywords_.to_dict()) +# model.db.session.commit() +# return list_of_elements, 201 +# +# +# @api.route("/study//metadata/keywords/") 
+# class StudyKeywordsDelete(Resource): +# """Study keywords Metadata update""" +# +# @api.doc("Delete Study Keywords") +# @api.response(204, "Success") +# @api.response(400, "Validation Error") +# def delete(self, study_id: int, keyword_id: int): +# """Delete study conditions metadata""" +# study = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study): +# return "Access denied, you can not delete study", 403 +# +# study_keywords_ = model.StudyKeywords.query.get(keyword_id) +# +# model.db.session.delete(study_keywords_) +# model.db.session.commit() +# +# return Response(status=204) diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 79eb4bb5..6a11c576 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -57,18 +57,25 @@ def post(self, study_id: int): "additionalProperties": False, "properties": { "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "affiliation": {"type": "string", "minLength": 1}, - "role": { - "type": "string", - "enum": [ - "Study Chair", - "Study Director", - "Study Principal Investigator", - ], - }, + "first_name": {"type": "string"}, + "last_name": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + "affiliation": {"type": "string"}, + "affiliation_identifier": {"type": "string"}, + "affiliation_identifier_scheme": {"type": "string"}, + "affiliation_identifier_scheme_uri": {"type": "string"}, + "role": {"type": ["string", "null"]}, + "degree": {"type": "string"}, }, - "required": ["name", "affiliation", "role"], + "required": [ + "first_name", + "last_name", + "affiliation", + "affiliation_identifier", + "role", + ], }, "uniqueItems": True, } diff --git a/apis/study_metadata/study_oversight.py b/apis/study_metadata/study_oversight.py new file mode 100644 index 00000000..6184f9af --- 
/dev/null +++ b/apis/study_metadata/study_oversight.py @@ -0,0 +1,76 @@ +"""API routes for study other metadata""" + +import typing + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_other = api.model( + "StudyOversight", + { + "id": fields.String(required=True), + "oversight_has_dmc": fields.Boolean(required=True), + "conditions": fields.String(required=True), + "keywords": fields.String(required=True), + "size": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/oversight") +class StudyOversightResource(Resource): + """Study Oversight Metadata""" + + @api.doc("oversight") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(study_other) + def get(self, study_id: int): + """Get study oversight metadata""" + study_ = model.Study.query.get(study_id) + + study_oversight_has_dmc = study_.study_oversight + return study_oversight_has_dmc.to_dict(), 200 + + def put(self, study_id: int): + """Update study oversight metadata""" + # Schema validation + schema = { + "type": "object", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "fda_regulated_drug": {"type": "string", "minLength": 1}, + "fda_regulated_device": {"type": "string", "minLength": 1}, + "has_dmc": {"type": "string"}, + "human_subject_review_status": {"type": "string"}, + }, + "required": [ + "fda_regulated_drug", + "fda_regulated_device", + "has_dmc", + "human_subject_review_status", + ], + }, + "uniqueItems": True, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + study_obj = model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not modify study", 403 + data: typing.Union[dict, typing.Any] = request.json + 
study_obj.study_oversight.update(data) + model.db.session.commit() + return study_obj.study_oversight.to_dict(), 200 diff --git a/apis/study_metadata/study_reference.py b/apis/study_metadata/study_reference.py deleted file mode 100644 index 19e1b15d..00000000 --- a/apis/study_metadata/study_reference.py +++ /dev/null @@ -1,106 +0,0 @@ -"""API routes for study reference metadata""" - -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_reference = api.model( - "StudyReference", - { - "id": fields.String(required=True), - "identifier": fields.String(required=True), - "type": fields.String(required=True), - "title": fields.String(required=True), - "citation": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/reference") -class StudyReferenceResource(Resource): - """Study Reference Metadata""" - - @api.doc("reference") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") - @api.marshal_with(study_reference) - def get(self, study_id: int): - """Get study reference metadata""" - study_ = model.Study.query.get(study_id) - - study_reference_ = study_.study_reference - - sorted_study_reference = sorted(study_reference_, key=lambda x: x.created_at) - - return [s.to_dict() for s in sorted_study_reference], 200 - - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """Create study reference metadata""" - # Schema validation - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "identifier": {"type": "string"}, - "type": {"type": ["string", "null"]}, - "citation": {"type": "string", "minLength": 1}, - }, - "required": ["citation", "identifier", 
"type"], - }, - "uniqueItems": True, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return {"message": e.message}, 400 - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - data: typing.Union[dict, typing.Any] = request.json - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_reference_ = model.StudyReference.query.get(i["id"]) - study_reference_.update(i) - else: - study_reference_ = model.StudyReference.from_data(study_obj, i) - model.db.session.add(study_reference_) - list_of_elements.append(study_reference_.to_dict()) - model.db.session.commit() - - return list_of_elements, 201 - - @api.route("/study//metadata/reference/") - class StudyReferenceUpdate(Resource): - """Study Reference Metadata""" - - @api.doc("delete reference") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, reference_id: int): - """Delete study reference metadata""" - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 - study_reference_ = model.StudyReference.query.get(reference_id) - - model.db.session.delete(study_reference_) - - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_sponsors.py b/apis/study_metadata/study_sponsors.py new file mode 100644 index 00000000..1d4a66a3 --- /dev/null +++ b/apis/study_metadata/study_sponsors.py @@ -0,0 +1,164 @@ +"""API routes for study sponsors and collaborators metadata""" + +import typing + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_sponsors = api.model( + "StudySponsors", + { + "responsible_party_type": 
fields.String(required=True), + "responsible_party_investigator_first_name": fields.String(required=False), + "responsible_party_investigator_last_name": fields.String(required=True), + "responsible_party_investigator_title": fields.String(required=True), + "responsible_party_investigator_identifier_value": fields.String(required=True), + "responsible_party_investigator_identifier_scheme": fields.String( + required=True + ), + "responsible_party_investigator_identifier_scheme_uri": fields.String( + required=True + ), + "responsible_party_investigator_affiliation_name": fields.String(required=True), + "responsible_party_investigator_affiliation_identifier_scheme": fields.String( + required=True + ), + "responsible_party_investigator_affiliation_identifier_value": fields.String( + required=True + ), + "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String( + required=True + ), + "lead_sponsor_name": fields.String(required=True), + "lead_sponsor_identifier": fields.String(required=True), + "lead_sponsor_identifier_scheme": fields.String(required=True), + "lead_sponsor_identifier_scheme_uri": fields.String(required=True), + }, +) + + +@api.route("/study//metadata/sponsor") +class StudySponsorsResource(Resource): + """Study Sponsors Metadata""" + + @api.doc("sponsors") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(study_sponsors) + def get(self, study_id: int): + """Get study sponsors metadata""" + study_ = model.Study.query.get(study_id) + + study_sponsors_ = study_.study_sponsors + + return study_sponsors_.to_dict(), 200 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + def put(self, study_id: int): + """Update study sponsors metadata""" + # Schema validation + schema = { + "type": "object", + "additionalProperties": False, + "required": [ + "responsible_party_type", + "lead_sponsor_name", + "responsible_party_investigator_last_name", + 
"responsible_party_investigator_first_name", + "responsible_party_investigator_title", + ], + "properties": { + "responsible_party_type": { + "type": ["string", "null"], + "enum": [ + "Sponsor", + "Principal Investigator", + "Sponsor-Investigator", + ], + }, + "responsible_party_investigator_first_name": { + "type": "string", + }, + "responsible_party_investigator_last_name": { + "type": "string", + }, + "responsible_party_investigator_title": { + "type": "string", + }, + "responsible_party_investigator_identifier_value": { + "type": "string", + }, + "responsible_party_investigator_identifier_scheme": { + "type": "string", + }, + "responsible_party_investigator_identifier_scheme_uri": { + "type": "string", + }, + "responsible_party_investigator_affiliation_name": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_scheme": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_value": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_scheme_uri": { + "type": "string", + }, + "lead_sponsor_name": {"type": "string"}, + "lead_sponsor_identifier": {"type": "string"}, + "lead_sponsor_identifier_scheme": {"type": "string"}, + "lead_sponsor_identifier_scheme_uri": { + "type": "string", + }, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: typing.Union[dict, typing.Any] = request.json + if data["responsible_party_type"] in [ + "Principal Investigator", + "Sponsor-Investigator", + ]: + if not data["responsible_party_investigator_last_name"]: + return "Principal Investigator name is required", 400 + if not data["responsible_party_investigator_first_name"]: + return "Principal Investigator name is required", 400 + + if not data["responsible_party_investigator_title"]: + return "Principal Investigator title is required", 400 + + investigator_first_name = data["responsible_party_investigator_first_name"] + 
investigator_last_name = data["responsible_party_investigator_last_name"] + investigator_title = data["responsible_party_investigator_title"] + + if investigator_first_name == "": + return "Principal Investigator first name cannot be empty", 400 + if investigator_last_name == "": + return "Principal Investigator last name cannot be empty", 400 + if investigator_title == "": + return "Principal Investigator title cannot be empty", 400 + + study_ = model.Study.query.get(study_id) + + # Check user permissions + if not is_granted("study_metadata", study_): + return "Access denied, you can not modify study", 403 + + study_.study_sponsors.update(data) + + model.db.session.commit() + + return study_.study_sponsors.to_dict(), 200 diff --git a/apis/study_metadata/study_sponsors_collaborators.py b/apis/study_metadata/study_sponsors_collaborators.py deleted file mode 100644 index 00d9885b..00000000 --- a/apis/study_metadata/study_sponsors_collaborators.py +++ /dev/null @@ -1,172 +0,0 @@ -"""API routes for study sponsors and collaborators metadata""" - -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_sponsors = api.model( - "StudySponsors", - { - "id": fields.String(required=True), - "responsible_party_type": fields.String(required=True), - "responsible_party_investigator_name": fields.String(required=True), - "responsible_party_investigator_title": fields.String(required=True), - "responsible_party_investigator_affiliation": fields.String(required=True), - "lead_sponsor_name": fields.String(required=True), - }, -) - - -study_collaborators = api.model( - "StudyCollaborators", - { - "collaborator_name": fields.List(fields.String, required=True), - }, -) - - -@api.route("/study//metadata/sponsors") -class StudySponsorsResource(Resource): - """Study Sponsors Metadata""" - - 
@api.doc("sponsors") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(study_sponsors) - def get(self, study_id: int): - """Get study sponsors metadata""" - study_ = model.Study.query.get(study_id) - - study_sponsors_collaborators_ = study_.study_sponsors_collaborators - - return study_sponsors_collaborators_.to_dict(), 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int): - """Update study sponsors metadata""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "required": [ - "responsible_party_type", - "lead_sponsor_name", - "responsible_party_investigator_name", - "responsible_party_investigator_title", - "responsible_party_investigator_affiliation", - ], - "properties": { - "responsible_party_type": { - "type": "string", - "minLength": 1, - "enum": [ - "Sponsor", - "Principal Investigator", - "Sponsor-Investigator", - ], - }, - "responsible_party_investigator_name": { - "type": "string", - }, - "responsible_party_investigator_title": { - "type": "string", - }, - "responsible_party_investigator_affiliation": { - "type": "string", - }, - "lead_sponsor_name": {"type": "string", "minLength": 1}, - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - if data["responsible_party_type"] in [ - "Principal Investigator", - "Sponsor-Investigator", - ]: - if not data["responsible_party_investigator_name"]: - return ("Principal Investigator name is required", 400) - - if not data["responsible_party_investigator_title"]: - return ("Principal Investigator title is required", 400) - - if not data["responsible_party_investigator_affiliation"]: - return ("Principal Investigator affiliation is required", 400) - - investigator_name = data["responsible_party_investigator_name"] - investigator_title = 
data["responsible_party_investigator_title"] - investigator_affiliation = data[ - "responsible_party_investigator_affiliation" - ] - - if investigator_name == "": - return ("Principal Investigator name cannot be empty", 400) - if investigator_title == "": - return ("Principal Investigator title cannot be empty", 400) - if investigator_affiliation == "": - return ("Principal Investigator affiliation cannot be empty", 400) - - study_ = model.Study.query.get(study_id) - - # Check user permissions - if not is_granted("study_metadata", study_): - return "Access denied, you can not modify study", 403 - - study_.study_sponsors_collaborators.update(request.json) - - model.db.session.commit() - - return study_.study_sponsors_collaborators.to_dict(), 200 - - -@api.route("/study//metadata/collaborators") -class StudyCollaboratorsResource(Resource): - """Study Collaborators Metadata""" - - @api.doc("collaborators") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_collaborators) - def get(self, study_id: int): - """Get study collaborators metadata""" - study_ = model.Study.query.get(study_id) - - study_collaborators_ = study_.study_sponsors_collaborators.collaborator_name - - return study_collaborators_, 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int): - """updating study collaborators""" - # Schema validation - schema = { - "type": "array", - "items": {"type": "string", "minLength": 1}, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - study_obj.study_sponsors_collaborators.collaborator_name = data - study_obj.touch() - model.db.session.commit() - return 
study_obj.study_sponsors_collaborators.collaborator_name, 200 diff --git a/model/__init__.py b/model/__init__.py index 67fbe758..54b3be62 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -26,20 +26,21 @@ from .study_metadata.arm import Arm from .study_metadata.identifiers import Identifiers from .study_metadata.study_arm import StudyArm -from .study_metadata.study_available_ipd import StudyAvailableIpd -from .study_metadata.study_contact import StudyContact +from .study_metadata.study_central_contact import StudyCentralContact +from .study_metadata.study_collaborators import StudyCollaborators +from .study_metadata.study_conditions import StudyConditions from .study_metadata.study_description import StudyDescription from .study_metadata.study_design import StudyDesign from .study_metadata.study_eligibility import StudyEligibility from .study_metadata.study_identification import StudyIdentification from .study_metadata.study_intervention import StudyIntervention -from .study_metadata.study_ipdsharing import StudyIpdsharing -from .study_metadata.study_link import StudyLink +from .study_metadata.study_keywords import StudyKeywords from .study_metadata.study_location import StudyLocation +from .study_metadata.study_location_contact_list import StudyLocationContactList from .study_metadata.study_other import StudyOther from .study_metadata.study_overall_official import StudyOverallOfficial -from .study_metadata.study_reference import StudyReference -from .study_metadata.study_sponsors_collaborators import StudySponsorsCollaborators +from .study_metadata.study_oversight import StudyOversight +from .study_metadata.study_sponsors import StudySponsors from .study_metadata.study_status import StudyStatus from .study_redcap import StudyRedcap from .token_blacklist import TokenBlacklist @@ -72,22 +73,23 @@ "DatasetRelatedIdentifier", "DatasetDescription", "StudyArm", - "StudyAvailableIpd", - "StudyContact", + "StudySponsors", + "StudyCentralContact", 
"StudyDescription", "StudyDesign", "StudyEligibility", "StudyIdentification", "StudyIntervention", - "StudyIpdsharing", - "StudyLink", "StudyLocation", + "StudyLocationContactList", "StudyOther", + "StudyKeywords", + "StudyConditions", + "StudyCollaborators", + "StudyOversight", "StudyOverallOfficial", "StudyRedcap", "StudyDashboard", - "StudyReference", - "StudySponsorsCollaborators", "StudyStatus", "Identifiers", "Arm", diff --git a/model/dataset_metadata/dataset_rights.py b/model/dataset_metadata/dataset_rights.py index 6df32e51..7cb6c164 100644 --- a/model/dataset_metadata/dataset_rights.py +++ b/model/dataset_metadata/dataset_rights.py @@ -10,6 +10,7 @@ def __init__(self, dataset): self.id = str(uuid.uuid4()) self.dataset = dataset self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.identifier_scheme_uri = "" __tablename__ = "dataset_rights" id = db.Column(db.CHAR(36), primary_key=True) @@ -18,6 +19,7 @@ def __init__(self, dataset): uri = db.Column(db.String, nullable=False) identifier = db.Column(db.String, nullable=False) identifier_scheme = db.Column(db.String, nullable=False) + identifier_scheme_uri = db.Column(db.String, nullable=False) license_text = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) @@ -32,6 +34,7 @@ def to_dict(self): "uri": self.uri, "identifier": self.identifier, "identifier_scheme": self.identifier_scheme, + "identifier_scheme_uri": self.identifier_scheme_uri, "created_at": self.created_at, "license_text": self.license_text, } @@ -54,5 +57,8 @@ def update(self, data: dict): self.uri = data["uri"] self.identifier = data["identifier"] self.identifier_scheme = data["identifier_scheme"] + self.identifier_scheme_uri = ( + data["identifier_scheme_uri"] if "identifier_scheme_uri" in data else "" + ) self.license_text = data["license_text"] self.dataset.touch_dataset() diff --git a/model/study.py b/model/study.py index 59e20332..d09312a1 100644 --- a/model/study.py +++ 
b/model/study.py @@ -19,21 +19,23 @@ class Study(db.Model): # type: ignore def __init__(self): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() - # + self.study_status = model.StudyStatus(self) - self.study_sponsors_collaborators = model.StudySponsorsCollaborators(self) + self.study_sponsors = model.StudySponsors(self) self.study_design = model.StudyDesign(self) self.study_eligibility = model.StudyEligibility(self) - self.study_ipdsharing = model.StudyIpdsharing(self) self.study_description = model.StudyDescription(self) self.study_identification.append(model.StudyIdentification(self, False)) self.study_other = model.StudyOther(self) - # self.study_contributors = model.StudyContributor(self) + self.study_oversight = model.StudyOversight(self) __tablename__ = "study" id = db.Column(db.CHAR(36), primary_key=True) - title = db.Column(db.String, nullable=False) + + title = db.Column(db.String(300), nullable=False) image = db.Column(db.String, nullable=False) + acronym = db.Column(db.String(14), nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) @@ -64,13 +66,14 @@ def __init__(self): back_populates="study", cascade="all, delete", ) - study_available_ipd = db.relationship( - "StudyAvailableIpd", + study_sponsors = db.relationship( + "StudySponsors", + uselist=False, back_populates="study", cascade="all, delete", ) - study_contact = db.relationship( - "StudyContact", + study_central_contact = db.relationship( + "StudyCentralContact", back_populates="study", cascade="all, delete", ) @@ -110,17 +113,7 @@ def __init__(self): back_populates="study", cascade="all, delete", ) - study_ipdsharing = db.relationship( - "StudyIpdsharing", - uselist=False, - back_populates="study", - cascade="all, delete", - ) - study_link = db.relationship( - "StudyLink", - back_populates="study", - cascade="all, delete", - ) + study_location = db.relationship( 
"StudyLocation", back_populates="study", @@ -132,22 +125,32 @@ def __init__(self): back_populates="study", cascade="all, delete", ) - study_overall_official = db.relationship( - "StudyOverallOfficial", + study_keywords = db.relationship( + "StudyKeywords", + back_populates="study", + cascade="all, delete", + ) + study_conditions = db.relationship( + "StudyConditions", back_populates="study", cascade="all, delete", ) - study_reference = db.relationship( - "StudyReference", + study_collaborators = db.relationship( + "StudyCollaborators", back_populates="study", cascade="all, delete", ) - study_sponsors_collaborators = db.relationship( - "StudySponsorsCollaborators", + study_oversight = db.relationship( + "StudyOversight", uselist=False, back_populates="study", cascade="all, delete", ) + study_overall_official = db.relationship( + "StudyOverallOfficial", + back_populates="study", + cascade="all, delete", + ) study_status = db.relationship( "StudyStatus", uselist=False, @@ -167,6 +170,7 @@ def to_dict(self): return { "id": self.id, "title": self.title, + "acronym": self.acronym, "image": self.image, "created_at": self.created_at, "updated_on": self.updated_on, @@ -188,11 +192,8 @@ def to_dict_study_metadata(self): return { "arms": [i.to_dict_metadata() for i in self.study_arm], # type: ignore - "available_ipd": [ - i.to_dict_metadata() for i in self.study_available_ipd # type: ignore - ], - "contacts": [ - i.to_dict_metadata() for i in self.study_contact # type: ignore + "central_contacts": [ + i.to_dict_metadata() for i in self.study_central_contact # type: ignore ], "description": self.study_description.to_dict_metadata(), "design": self.study_design.to_dict(), @@ -206,8 +207,6 @@ def to_dict_study_metadata(self): "interventions": [ i.to_dict_metadata() for i in self.study_intervention # type: ignore ], - "ipd_sharing": self.study_ipdsharing.to_dict_metadata(), - "links": [i.to_dict_metadata() for i in self.study_link], # type: ignore "locations": [ 
i.to_dict_metadata() for i in self.study_location # type: ignore ], @@ -215,15 +214,18 @@ def to_dict_study_metadata(self): i.to_dict_metadata() for i in self.study_overall_official # type: ignore ], - "references": [ - i.to_dict_metadata() for i in self.study_reference # type: ignore + "sponsors": self.study_sponsors.to_dict_metadata(), + "collaborators": [ + i.to_dict_metadata() for i in self.study_collaborators # type: ignore ], - "sponsors": self.study_sponsors_collaborators.to_dict_metadata(), - "collaborators": self.study_sponsors_collaborators.collaborator_name, "status": self.study_status.to_dict_metadata(), - "oversight": self.study_other.oversight_has_dmc, - "conditions": self.study_other.conditions, - "keywords": self.study_other.keywords, + "oversight": self.study_oversight.to_dict(), + "conditions": [ + i.to_dict_metadata() for i in self.study_conditions # type: ignore + ], + "keywords": [ + i.to_dict_metadata() for i in self.study_keywords # type: ignore + ], } @staticmethod @@ -243,6 +245,7 @@ def update(self, data: dict): self.title = data["title"] self.image = data["image"] + self.acronym = data["acronym"] self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() def validate(self): diff --git a/model/study_metadata/study_available_ipd.py b/model/study_metadata/study_available_ipd.py deleted file mode 100644 index 5ae9913c..00000000 --- a/model/study_metadata/study_available_ipd.py +++ /dev/null @@ -1,65 +0,0 @@ -import datetime -import uuid -from datetime import timezone - -import model - -from ..db import db - - -class StudyAvailableIpd(db.Model): # type: ignore - """A study is a collection of datasets and participants""" - - def __init__(self, study): - self.id = str(uuid.uuid4()) - self.study = study - self.created_at = datetime.datetime.now(timezone.utc).timestamp() - - __tablename__ = "study_available_ipd" - - id = db.Column(db.CHAR(36), primary_key=True) - identifier = db.Column(db.String, nullable=False) - type = 
db.Column(db.String, nullable=True) - url = db.Column(db.String, nullable=False) - comment = db.Column(db.String, nullable=False) - created_at = db.Column(db.BigInteger, nullable=False) - - study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False - ) - study = db.relationship("Study", back_populates="study_available_ipd") - - def to_dict(self): - """Converts the study to a dictionary""" - return { - "id": self.id, - "identifier": self.identifier, - "type": self.type, - "url": self.url, - "comment": self.comment, - "created_at": self.created_at, - } - - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return {"identifier": self.identifier, "url": self.url} - - @staticmethod - def from_data(study: model.StudyArm, data: dict): - """Creates a new study metadata from a dictionary""" - study_available = StudyAvailableIpd(study) - study_available.update(data) - return study_available - - def update(self, data: dict): - """Updates the study metadata from a dictionary""" - self.identifier = data["identifier"] - self.type = data["type"] - self.url = data["url"] - self.comment = data["comment"] - self.study.touch() - - def validate(self): - """Validates the study""" - violations: list = [] - return violations diff --git a/model/study_metadata/study_central_contact.py b/model/study_metadata/study_central_contact.py new file mode 100644 index 00000000..9822ecc0 --- /dev/null +++ b/model/study_metadata/study_central_contact.py @@ -0,0 +1,103 @@ +import datetime +import uuid +from datetime import timezone + +from model import Study + +from ..db import db + + +class StudyCentralContact(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study: Study): + self.id = str(uuid.uuid4()) + self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + + __tablename__ = "study_central_contact" + + id = db.Column(db.CHAR(36), 
primary_key=True) + first_name = db.Column(db.String, nullable=False) + last_name = db.Column(db.String, nullable=False) + degree = db.Column(db.String, nullable=False) + identifier = db.Column(db.String, nullable=False) + identifier_scheme = db.Column(db.String, nullable=False) + identifier_scheme_uri = db.Column(db.String, nullable=False) + affiliation = db.Column(db.String, nullable=False) + affiliation_identifier = db.Column(db.String, nullable=False) + affiliation_identifier_scheme = db.Column(db.String, nullable=False) + affiliation_identifier_scheme_uri = db.Column(db.String, nullable=False) + phone = db.Column(db.String, nullable=False) + phone_ext = db.Column(db.String, nullable=False) + email_address = db.Column(db.String, nullable=False) + + created_at = db.Column(db.BigInteger, nullable=False) + + study_id = db.Column( + db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + ) + study = db.relationship("Study", back_populates="study_central_contact") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "first_name": self.first_name, + "last_name": self.last_name, + "degree": self.degree, + "identifier": self.identifier, + "identifier_scheme": self.identifier_scheme, + "identifier_scheme_uri": self.identifier_scheme_uri, + "affiliation": self.affiliation, + "affiliation_identifier": self.affiliation_identifier, + "affiliation_identifier_scheme": self.affiliation_identifier_scheme, + "affiliation_identifier_scheme_uri": self.affiliation_identifier_scheme_uri, + "phone": self.phone, + "phone_ext": self.phone_ext, + "email_address": self.email_address, + "created_at": self.created_at, + } + + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "id": self.id, + "first_name": self.first_name, + "last_name": self.last_name, + "affiliation": self.affiliation, + "phone": self.phone, + "email_address": self.email_address, + } + + @staticmethod + def 
from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_contact = StudyCentralContact(study) + study_contact.update(data) + + return study_contact + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.first_name = data["first_name"] + self.last_name = data["last_name"] + self.degree = data["degree"] + self.identifier = data["identifier"] + self.identifier_scheme = data["identifier_scheme"] + self.identifier_scheme_uri = data["identifier_scheme_uri"] + self.affiliation = data["affiliation"] + self.affiliation_identifier = data["affiliation_identifier"] + self.affiliation_identifier_scheme = data["affiliation_identifier_scheme"] + self.affiliation_identifier_scheme_uri = data[ + "affiliation_identifier_scheme_uri" + ] + self.phone = data["phone"] + self.phone_ext = data["phone_ext"] + self.email_address = data["email_address"] + self.study.touch() + + def validate(self): + """Validates the lead_sponsor_last_name study""" + violations: list = [] + return violations diff --git a/model/study_metadata/study_reference.py b/model/study_metadata/study_collaborators.py similarity index 60% rename from model/study_metadata/study_reference.py rename to model/study_metadata/study_collaborators.py index cce05886..bcd6f64b 100644 --- a/model/study_metadata/study_reference.py +++ b/model/study_metadata/study_collaborators.py @@ -7,7 +7,7 @@ from ..db import db -class StudyReference(db.Model): # type: ignore +class StudyCollaborators(db.Model): # type: ignore """A study is a collection of datasets and participants""" def __init__(self, study): @@ -15,50 +15,53 @@ def __init__(self, study): self.study = study self.created_at = datetime.datetime.now(timezone.utc).timestamp() - __tablename__ = "study_reference" + __tablename__ = "study_collaborators" id = db.Column(db.CHAR(36), primary_key=True) + name = db.Column(db.String, nullable=False) identifier = db.Column(db.String, nullable=False) - type = db.Column(db.String, 
nullable=True) - citation = db.Column(db.String, nullable=False) + scheme = db.Column(db.String, nullable=False) + scheme_uri = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + nullable=False, ) - study = db.relationship("Study", back_populates="study_reference") + study = db.relationship("Study", back_populates="study_collaborators") def to_dict(self): """Converts the study to a dictionary""" return { "id": self.id, + "name": self.name, "identifier": self.identifier, - "type": self.type, - "citation": self.citation, + "scheme": self.scheme, + "scheme_uri": self.scheme_uri, "created_at": self.created_at, } def to_dict_metadata(self): """Converts the study metadata to a dictionary""" return { - "id": self.id, - "identifier": self.identifier, - "citation": self.citation, + "name": self.name, } @staticmethod def from_data(study: Study, data: dict): """Creates a new study from a dictionary""" - study_reference = StudyReference(study) - study_reference.update(data) + study_keywords = StudyCollaborators(study) + study_keywords.update(data) - return study_reference + return study_keywords def update(self, data: dict): """Updates the study from a dictionary""" + self.name = data["name"] self.identifier = data["identifier"] - self.type = data["type"] - self.citation = data["citation"] + self.scheme = data["identifier_scheme"] + self.scheme_uri = data["identifier_scheme_uri"] self.study.touch() def validate(self): diff --git a/model/study_metadata/study_conditions.py b/model/study_metadata/study_conditions.py new file mode 100644 index 00000000..6d0bf83b --- /dev/null +++ b/model/study_metadata/study_conditions.py @@ -0,0 +1,73 @@ +import datetime +import uuid +from datetime import timezone + +from model import Study + +from ..db import db + + +class 
StudyConditions(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.id = str(uuid.uuid4()) + self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + + __tablename__ = "study_conditions" + + id = db.Column(db.CHAR(36), primary_key=True) + name = db.Column(db.String, nullable=False) + classification_code = db.Column(db.String, nullable=False) + scheme = db.Column(db.String, nullable=False) + scheme_uri = db.Column(db.String, nullable=False) + condition_uri = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + + study_id = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + nullable=False, + ) + study = db.relationship("Study", back_populates="study_conditions") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "name": self.name, + "classification_code": self.classification_code, + "scheme": self.scheme, + "scheme_uri": self.scheme_uri, + "condition_uri": self.condition_uri, + "created_at": self.created_at, + } + + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "name": self.name, + } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_keywords = StudyConditions(study) + study_keywords.update(data) + + return study_keywords + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.name = data["name"] + self.classification_code = data["classification_code"] + self.scheme = data["scheme"] + self.scheme_uri = data["scheme_uri"] + self.condition_uri = data["condition_uri"] + self.study.touch() + + def validate(self): + """Validates the study""" + violations: list = [] + return violations diff --git a/model/study_metadata/study_contact.py b/model/study_metadata/study_contact.py deleted file mode 100644 index 
d2c5c5d3..00000000 --- a/model/study_metadata/study_contact.py +++ /dev/null @@ -1,82 +0,0 @@ -import datetime -import uuid -from datetime import timezone - -from model import Study - -from ..db import db - - -class StudyContact(db.Model): # type: ignore - """A study is a collection of datasets and participants""" - - def __init__(self, study: Study, role, central_contact): - self.id = str(uuid.uuid4()) - self.study = study - self.role = role - self.central_contact = central_contact - self.created_at = datetime.datetime.now(timezone.utc).timestamp() - - __tablename__ = "study_contact" - - id = db.Column(db.CHAR(36), primary_key=True) - name = db.Column(db.String, nullable=False) - affiliation = db.Column(db.String, nullable=False) - role = db.Column(db.String, nullable=True) - phone = db.Column(db.String, nullable=False) - phone_ext = db.Column(db.String, nullable=False) - email_address = db.Column(db.String, nullable=False) - central_contact = db.Column(db.BOOLEAN, nullable=False) - created_at = db.Column(db.BigInteger, nullable=False) - - study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False - ) - study = db.relationship("Study", back_populates="study_contact") - - def to_dict(self): - """Converts the study to a dictionary""" - return { - "id": self.id, - "name": self.name, - "affiliation": self.affiliation, - "role": self.role, - "phone": self.phone, - "phone_ext": self.phone_ext, - "email_address": self.email_address, - "central_contact": self.central_contact, - "created_at": self.created_at, - } - - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "id": self.id, - "name": self.name, - "affiliation": self.affiliation, - "phone": self.phone, - "email_address": self.email_address, - } - - @staticmethod - def from_data(study: Study, data: dict, role, central_contact): - """Creates a new study from a dictionary""" - study_contact = StudyContact(study, role, central_contact) - 
study_contact.update(data) - - return study_contact - - def update(self, data: dict): - """Updates the study from a dictionary""" - self.name = data["name"] - self.affiliation = data["affiliation"] - # self.role = data["role"] - self.phone = data["phone"] - self.phone_ext = data["phone_ext"] - self.email_address = data["email_address"] - self.study.touch() - - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index f4d33679..40567ca0 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -29,7 +29,7 @@ def __init__(self, study: Study): self.bio_spec_retention = None self.bio_spec_description = None self.target_duration = None - self.number_groups_cohorts = None + self.is_patient_registry = None __tablename__ = "study_design" @@ -50,7 +50,7 @@ def __init__(self, study: Study): bio_spec_retention = db.Column(db.String, nullable=True) bio_spec_description = db.Column(db.String, nullable=True) target_duration = db.Column(db.String, nullable=True) - number_groups_cohorts = db.Column(db.Integer, nullable=True) + is_patient_registry = db.Column(db.String, nullable=True) study_id = db.Column( db.CHAR(36), @@ -80,7 +80,7 @@ def to_dict(self): "bio_spec_retention": self.bio_spec_retention, "bio_spec_description": self.bio_spec_description, "target_duration": self.target_duration, - "number_groups_cohorts": self.number_groups_cohorts, + "is_patient_registry": self.is_patient_registry, } @staticmethod @@ -112,7 +112,7 @@ def update(self, data: dict): self.bio_spec_retention = data["bio_spec_retention"] self.bio_spec_description = data["bio_spec_description"] self.target_duration = data["target_duration"] - self.number_groups_cohorts = data["number_groups_cohorts"] + self.is_patient_registry = data["is_patient_registry"] self.study.touch() def validate(self): diff --git 
a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 1a636819..7370383f 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -11,7 +11,7 @@ class StudyEligibility(db.Model): # type: ignore def __init__(self, study: Study): self.study = study - self.gender = None + self.sex = None self.gender_based = None self.gender_description = "" self.minimum_age_value = None # 18 @@ -26,7 +26,7 @@ def __init__(self, study: Study): __tablename__ = "study_eligibility" - gender = db.Column(db.String, nullable=True) + sex = db.Column(db.String, nullable=True) gender_based = db.Column(db.String, nullable=True) gender_description = db.Column(db.String, nullable=False) minimum_age_value = db.Column(db.Integer, nullable=True) @@ -50,7 +50,7 @@ def __init__(self, study: Study): def to_dict(self): """Converts the study to a dictionary""" return { - "gender": self.gender, + "sex": self.sex, "gender_based": self.gender_based, "gender_description": self.gender_description, "minimum_age_unit": self.minimum_age_unit, @@ -70,7 +70,7 @@ def to_dict(self): def to_dict_metadata(self): """Converts the study metadata to a dictionary""" return { - "gender": self.gender, + "sex": self.sex, "minimum_age_value": self.minimum_age_value, "gender_based": self.gender_based, } @@ -85,7 +85,7 @@ def from_data(study: Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - self.gender = data["gender"] + self.sex = data["sex"] self.gender_based = data["gender_based"] self.gender_description = data["gender_description"] self.minimum_age_value = data["minimum_age_value"] diff --git a/model/study_metadata/study_ipdsharing.py b/model/study_metadata/study_ipdsharing.py deleted file mode 100644 index 9152959c..00000000 --- a/model/study_metadata/study_ipdsharing.py +++ /dev/null @@ -1,77 +0,0 @@ -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY - -from 
model import Study - -from ..db import db - - -class StudyIpdsharing(db.Model): # type: ignore - """A study is a collection of datasets and participants""" - - def __init__(self, study): - self.study = study - self.ipd_sharing = None - self.ipd_sharing_description = "" - self.ipd_sharing_info_type_list = [] - self.ipd_sharing_time_frame = "" - self.ipd_sharing_access_criteria = "" - self.ipd_sharing_url = "" - - __tablename__ = "study_ipdsharing" - - ipd_sharing = db.Column(db.String, nullable=True) - ipd_sharing_description = db.Column(db.String, nullable=False) - ipd_sharing_info_type_list = db.Column(ARRAY(String), nullable=False) - ipd_sharing_time_frame = db.Column(db.String, nullable=False) - ipd_sharing_access_criteria = db.Column(db.String, nullable=False) - ipd_sharing_url = db.Column(db.String, nullable=False) - - study_id = db.Column( - db.CHAR(36), - db.ForeignKey("study.id", ondelete="CASCADE"), - primary_key=True, - nullable=False, - ) - study = db.relationship("Study", back_populates="study_ipdsharing") - - def to_dict(self): - """Converts the study to a dictionary""" - return { - "ipd_sharing": self.ipd_sharing, - "ipd_sharing_description": self.ipd_sharing_description, - "ipd_sharing_info_type_list": self.ipd_sharing_info_type_list, - "ipd_sharing_time_frame": self.ipd_sharing_time_frame, - "ipd_sharing_access_criteria": self.ipd_sharing_access_criteria, - "ipd_sharing_url": self.ipd_sharing_url, - } - - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "ipd_sharing": self.ipd_sharing, - "ipd_sharing_info_type_list": self.ipd_sharing_info_type_list, - } - - @staticmethod - def from_data(study: Study, data: dict): - """Creates a new study from a dictionary""" - study_ipdsharing = StudyIpdsharing(study) - study_ipdsharing.update(data) - - return study_ipdsharing - - def update(self, data: dict): - """Updates the study from a dictionary""" - self.ipd_sharing = data["ipd_sharing"] - 
self.ipd_sharing_description = data["ipd_sharing_description"] - self.ipd_sharing_info_type_list = data["ipd_sharing_info_type_list"] - self.ipd_sharing_time_frame = data["ipd_sharing_time_frame"] - self.ipd_sharing_access_criteria = data["ipd_sharing_access_criteria"] - self.ipd_sharing_url = data["ipd_sharing_url"] - self.study.touch() - - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations diff --git a/model/study_metadata/study_keywords.py b/model/study_metadata/study_keywords.py new file mode 100644 index 00000000..5ff213a9 --- /dev/null +++ b/model/study_metadata/study_keywords.py @@ -0,0 +1,73 @@ +import datetime +import uuid +from datetime import timezone + +from model import Study + +from ..db import db + + +class StudyKeywords(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.id = str(uuid.uuid4()) + self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + + __tablename__ = "study_keywords" + + id = db.Column(db.CHAR(36), primary_key=True) + name = db.Column(db.String, nullable=False) + classification_code = db.Column(db.String, nullable=False) + scheme = db.Column(db.String, nullable=False) + scheme_uri = db.Column(db.String, nullable=False) + keyword_uri = db.Column(db.String, nullable=False) + created_at = db.Column(db.String, nullable=False) + + study_id = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + nullable=False, + ) + study = db.relationship("Study", back_populates="study_keywords") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "name": self.name, + "classification_code": self.classification_code, + "scheme": self.scheme, + "scheme_uri": self.scheme_uri, + "keyword_uri": self.keyword_uri, + "created_at": self.created_at, + } + + def to_dict_metadata(self): + """Converts the study metadata to a 
dictionary""" + return { + "name": self.name, + } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_keywords = StudyKeywords(study) + study_keywords.update(data) + + return study_keywords + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.name = data["name"] + self.classification_code = data["classification_code"] + self.scheme = data["scheme"] + self.scheme_uri = data["scheme_uri"] + self.keyword_uri = data["keyword_uri"] + self.study.touch() + + def validate(self): + """Validates the study""" + violations: list = [] + return violations diff --git a/model/study_metadata/study_link.py b/model/study_metadata/study_link.py deleted file mode 100644 index aa3ba44e..00000000 --- a/model/study_metadata/study_link.py +++ /dev/null @@ -1,64 +0,0 @@ -import datetime -import uuid -from datetime import timezone - -from model import Study - -from ..db import db - - -class StudyLink(db.Model): # type: ignore - """A study is a collection of datasets and participants""" - - def __init__(self, study): - self.id = str(uuid.uuid4()) - self.study = study - self.created_at = datetime.datetime.now(timezone.utc).timestamp() - - __tablename__ = "study_link" - - id = db.Column(db.CHAR(36), primary_key=True) - url = db.Column(db.String, nullable=False) - title = db.Column(db.String, nullable=False) - created_at = db.Column(db.BigInteger, nullable=False) - - study_id = db.Column( - db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False - ) - study = db.relationship("Study", back_populates="study_link") - - def to_dict(self): - """Converts the study to a dictionary""" - return { - "id": self.id, - "url": self.url, - "title": self.title, - "created_at": self.created_at, - } - - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "title": self.title, - "url": self.url, - "id": self.id, - } - - @staticmethod - def from_data(study: Study, 
data: dict): - """Creates a new study from a dictionary""" - study_link = StudyLink(study) - study_link.update(data) - - return study_link - - def update(self, data: dict): - """Updates the study from a dictionary""" - self.url = data["url"] - self.title = data["title"] - self.study.touch() - - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index 5e4d55df..cbfe9f91 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -29,6 +29,11 @@ def __init__(self, study): study_id = db.Column( db.CHAR(36), db.ForeignKey("study.id", ondelete="CASCADE"), nullable=False ) + study_location_contact_list = db.relationship( + "StudyLocationContactList", + back_populates="study_location", + cascade="all, delete", + ) study = db.relationship("Study", back_populates="study_location") def to_dict(self): diff --git a/model/study_metadata/study_location_contact_list.py b/model/study_metadata/study_location_contact_list.py new file mode 100644 index 00000000..c6895364 --- /dev/null +++ b/model/study_metadata/study_location_contact_list.py @@ -0,0 +1,89 @@ +import datetime +import uuid +from datetime import timezone + +from model import Study + +from ..db import db + + +class StudyLocationContactList(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.id = str(uuid.uuid4()) + self.study = study + self.created_at = datetime.datetime.now(timezone.utc).timestamp() + + __tablename__ = "study_location_location_list" + + id = db.Column(db.CHAR(36), primary_key=True) + first_name = db.Column(db.String, nullable=False) + last_name = db.Column(db.String, nullable=False) + identifier = db.Column(db.String, nullable=False) + identifier_scheme = db.Column(db.String, nullable=False) + zip = db.Column(db.String, nullable=False) + 
role = db.Column(db.String, nullable=False) + phone = db.Column(db.String, nullable=False) + phone_ext = db.Column(db.String, nullable=False) + email_address = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) + + study_location_id = db.Column( + db.CHAR(36), + db.ForeignKey("study_location.id", ondelete="CASCADE"), + nullable=False, + ) + study_location = db.relationship( + "StudyLocation", back_populates="study_location_contact_list" + ) + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "id": self.id, + "first_name": self.first_name, + "last_name": self.last_name, + "identifier": self.identifier, + "identifier_scheme": self.identifier_scheme, + "zip": self.zip, + "role": self.role, + "phone": self.phone, + "phone_ext": self.phone_ext, + "email_address": self.email_address, + "created_at": self.created_at, + } + + # def to_dict_metadata(self): + # """Converts the study metadata to a dictionary""" + # return { + # "id": self.id, + # "facility": self.facility, + # "country": self.country, + # } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_location_contact_list = StudyLocationContactList(study) + study_location_contact_list.update(data) + + return study_location_contact_list + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.first_name = data["first_name"] + self.last_name = data["last_name"] + self.identifier = data["identifier"] + self.identifier_scheme = data["identifier_scheme"] + self.zip = data["zip"] + self.role = data["role"] + self.phone = data["phone"] + self.phone_ext = data["phone_ext"] + self.email_address = data["email_address"] + self.study.touch() + + def validate(self): + """Validates the lead_sponsor_last_name study""" + violations: list = [] + return violations diff --git a/model/study_metadata/study_other.py b/model/study_metadata/study_other.py index 1d7cd8e3..4f20e4b5 
100644 --- a/model/study_metadata/study_other.py +++ b/model/study_metadata/study_other.py @@ -1,6 +1,3 @@ -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY - from model import Study from ..db import db @@ -11,16 +8,10 @@ class StudyOther(db.Model): # type: ignore def __init__(self, study): self.study = study - self.oversight_has_dmc = False - self.conditions = [] - self.keywords = [] self.size = 0 __tablename__ = "study_other" - oversight_has_dmc = db.Column(db.BOOLEAN, nullable=False) - conditions = db.Column(ARRAY(String), nullable=False) - keywords = db.Column(ARRAY(String), nullable=False) size = db.Column(db.BigInteger, nullable=False) study_id = db.Column( @@ -35,19 +26,14 @@ def to_dict(self): """Converts the study to a dictionary""" return { "id": self.study_id, - "oversight_has_dmc": self.oversight_has_dmc, - "conditions": self.conditions, - "keywords": self.keywords, "size": self.size, } - # def to_dict_metadata(self): - # """Converts the study metadata to a dictionary""" - # return { - # "oversight_has_dmc": self.oversight_has_dmc, - # "conditions": self.conditions, - # "keywords": self.keywords - # } + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "size": self.size, + } @staticmethod def from_data(study: Study, data: dict): @@ -59,9 +45,7 @@ def from_data(study: Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - self.oversight_has_dmc = data["oversight_has_dmc"] - self.conditions = data["conditions"] - self.keywords = data["keywords"] + self.size = data["size"] self.study.touch() diff --git a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index 08c1f84e..a6f6c561 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -14,13 +14,29 @@ def __init__(self, study): self.id = str(uuid.uuid4()) self.study = study self.created_at = 
datetime.datetime.now(timezone.utc).timestamp() + # self.degree = "" + # self.identifier = "" + # self.identifier_scheme = "" + # self.identifier_scheme_uri = "" + # self.affiliation_identifier = "" + # self.affiliation_identifier_scheme = "" + # self.affiliation_identifier_scheme_uri = "" __tablename__ = "study_overall_official" id = db.Column(db.CHAR(36), primary_key=True) - name = db.Column(db.String, nullable=False) + first_name = db.Column(db.String, nullable=False) + last_name = db.Column(db.String, nullable=False) + degree = db.Column(db.String, nullable=False) + identifier = db.Column(db.String, nullable=False) + identifier_scheme = db.Column(db.String, nullable=False) + identifier_scheme_uri = db.Column(db.String, nullable=False) affiliation = db.Column(db.String, nullable=False) + affiliation_identifier = db.Column(db.String, nullable=False) + affiliation_identifier_scheme = db.Column(db.String, nullable=False) + affiliation_identifier_scheme_uri = db.Column(db.String, nullable=False) role = db.Column(db.String, nullable=True) + created_at = db.Column(db.BigInteger, nullable=False) study_id = db.Column( @@ -32,16 +48,25 @@ def to_dict(self): """Converts the study to a dictionary""" return { "id": self.id, - "name": self.name, + "first_name": self.first_name, + "last_name": self.last_name, "affiliation": self.affiliation, "role": self.role, + "degree": self.degree, + "identifier": self.identifier, + "identifier_scheme": self.identifier_scheme, + "identifier_scheme_uri": self.identifier_scheme_uri, + "affiliation_identifier": self.affiliation_identifier, + "affiliation_identifier_scheme": self.affiliation_identifier_scheme, + "affiliation_identifier_scheme_uri": self.affiliation_identifier_scheme_uri, "created_at": self.created_at, } def to_dict_metadata(self): """Converts the study metadata to a dictionary""" return { - "name": self.name, + "first_name": self.first_name, + "last_name": self.last_name, "affiliation": self.affiliation, "role": self.role, } 
@@ -56,8 +81,18 @@ def from_data(study: Study, data: dict): def update(self, data: dict): """Updates the study from a dictionary""" - self.name = data["name"] + self.first_name = data["first_name"] + self.last_name = data["last_name"] self.affiliation = data["affiliation"] + self.degree = data["degree"] + self.identifier = data["identifier"] + self.identifier_scheme = data["identifier_scheme"] + self.identifier_scheme_uri = data["identifier_scheme_uri"] + self.affiliation_identifier = data["affiliation_identifier"] + self.affiliation_identifier_scheme = data["affiliation_identifier_scheme"] + self.affiliation_identifier_scheme_uri = data[ + "affiliation_identifier_scheme_uri" + ] self.role = data["role"] self.study.touch() diff --git a/model/study_metadata/study_oversight.py b/model/study_metadata/study_oversight.py new file mode 100644 index 00000000..98e95e41 --- /dev/null +++ b/model/study_metadata/study_oversight.py @@ -0,0 +1,59 @@ +from model import Study + +from ..db import db + + +class StudyOversight(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.study = study + self.fda_regulated_drug = "" + self.fda_regulated_device = "" + self.human_subject_review_status = "" + self.has_dmc = "" + + __tablename__ = "study_oversight" + + fda_regulated_drug = db.Column(db.String, nullable=False) + fda_regulated_device = db.Column(db.String, nullable=False) + human_subject_review_status = db.Column(db.String, nullable=False) + has_dmc = db.Column(db.String, nullable=False) + + study_id = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, + ) + study = db.relationship("Study", back_populates="study_oversight") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "fda_regulated_drug": self.fda_regulated_drug, + "fda_regulated_device": self.fda_regulated_device, + "human_subject_review_status": 
self.human_subject_review_status, + "has_dmc": self.has_dmc, + } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_keywords = StudyOversight(study) + study_keywords.update(data) + + return study_keywords + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.fda_regulated_drug = data["fda_regulated_drug"] + self.fda_regulated_device = data["fda_regulated_device"] + self.human_subject_review_status = data["human_subject_review_status"] + self.has_dmc = data["has_dmc"] + self.study.touch() + + def validate(self): + """Validates the study""" + violations: list = [] + return violations diff --git a/model/study_metadata/study_sponsors.py b/model/study_metadata/study_sponsors.py new file mode 100644 index 00000000..1e7832ca --- /dev/null +++ b/model/study_metadata/study_sponsors.py @@ -0,0 +1,147 @@ +from model import Study + +from ..db import db + + +class StudySponsors(db.Model): # type: ignore + """A study is a collection of datasets and participants""" + + def __init__(self, study): + self.study = study + self.responsible_party_type = None + self.responsible_party_investigator_first_name = "" + self.responsible_party_investigator_last_name = "" + self.responsible_party_investigator_title = "" + self.responsible_party_investigator_identifier_value = "" + self.responsible_party_investigator_identifier_scheme = "" + self.responsible_party_investigator_identifier_scheme_uri = "" + self.responsible_party_investigator_affiliation_name = "" + self.responsible_party_investigator_affiliation_identifier_value = "" + self.responsible_party_investigator_affiliation_identifier_scheme = "" + self.responsible_party_investigator_affiliation_identifier_scheme_uri = "" + self.lead_sponsor_name = "" + self.lead_sponsor_identifier = "" + self.lead_sponsor_identifier_scheme = "" + self.lead_sponsor_identifier_scheme_uri = "" + + __tablename__ = "study_sponsors" + + responsible_party_type = 
db.Column(db.String, nullable=True) + responsible_party_investigator_first_name = db.Column(db.String, nullable=False) + responsible_party_investigator_last_name = db.Column(db.String, nullable=False) + responsible_party_investigator_title = db.Column(db.String, nullable=False) + responsible_party_investigator_identifier_value = db.Column( + db.String, nullable=False + ) + responsible_party_investigator_identifier_scheme = db.Column( + db.String, nullable=False + ) + responsible_party_investigator_identifier_scheme_uri = db.Column( + db.String, nullable=False + ) + responsible_party_investigator_affiliation_name = db.Column( + db.String, nullable=False + ) + responsible_party_investigator_affiliation_identifier_value = db.Column( + db.String, nullable=False + ) + responsible_party_investigator_affiliation_identifier_scheme = db.Column( + db.String, nullable=False + ) + responsible_party_investigator_affiliation_identifier_scheme_uri = db.Column( + db.String, nullable=False + ) + lead_sponsor_name = db.Column(db.String, nullable=False) + lead_sponsor_identifier = db.Column(db.String, nullable=False) + lead_sponsor_identifier_scheme = db.Column(db.String, nullable=False) + lead_sponsor_identifier_scheme_uri = db.Column(db.String, nullable=False) + + study_id = db.Column( + db.CHAR(36), + db.ForeignKey("study.id", ondelete="CASCADE"), + primary_key=True, + nullable=False, + ) + study = db.relationship("Study", back_populates="study_sponsors") + + def to_dict(self): + """Converts the study to a dictionary""" + return { + "responsible_party_type": self.responsible_party_type, + "responsible_party_investigator_first_name": self.responsible_party_investigator_first_name, + "responsible_party_investigator_last_name": self.responsible_party_investigator_last_name, + "responsible_party_investigator_title": self.responsible_party_investigator_title, + "responsible_party_investigator_identifier_value": self.responsible_party_investigator_identifier_value, + 
"responsible_party_investigator_identifier_scheme": self.responsible_party_investigator_identifier_scheme, + "responsible_party_investigator_identifier_scheme_uri": self.responsible_party_investigator_identifier_scheme_uri, # noqa: E501 + "responsible_party_investigator_affiliation_name": self.responsible_party_investigator_affiliation_name, + "responsible_party_investigator_affiliation_identifier_scheme": self.responsible_party_investigator_affiliation_identifier_scheme, # noqa: E501 + "responsible_party_investigator_affiliation_identifier_value": self.responsible_party_investigator_affiliation_identifier_value, # noqa: E501 + "responsible_party_investigator_affiliation_identifier_scheme_uri": self.responsible_party_investigator_affiliation_identifier_scheme_uri, # noqa: E501 + "lead_sponsor_name": self.lead_sponsor_name, + "lead_sponsor_identifier": self.lead_sponsor_identifier, + "lead_sponsor_identifier_scheme": self.lead_sponsor_identifier_scheme, + "lead_sponsor_identifier_scheme_uri": self.lead_sponsor_identifier_scheme_uri, + } + + def to_dict_metadata(self): + """Converts the study metadata to a dictionary""" + return { + "responsible_party_type": self.responsible_party_type, + "responsible_party_investigator_first_name": self.responsible_party_investigator_first_name, + "responsible_party_investigator_last_name": self.responsible_party_investigator_last_name, + "lead_sponsor_name": self.lead_sponsor_name, + } + + @staticmethod + def from_data(study: Study, data: dict): + """Creates a new study from a dictionary""" + study_sponsors = StudySponsors(study) + study_sponsors.update(data) + + return study_sponsors + + def update(self, data: dict): + """Updates the study from a dictionary""" + self.responsible_party_type = data["responsible_party_type"] + self.responsible_party_investigator_first_name = data[ + "responsible_party_investigator_first_name" + ] + self.responsible_party_investigator_last_name = data[ + "responsible_party_investigator_last_name" + ] 
+ self.responsible_party_investigator_title = data[ + "responsible_party_investigator_title" + ] + self.responsible_party_investigator_identifier_value = data[ + "responsible_party_investigator_identifier_value" + ] + self.responsible_party_investigator_identifier_scheme = data[ + "responsible_party_investigator_identifier_scheme" + ] + self.responsible_party_investigator_identifier_scheme_uri = data[ + "responsible_party_investigator_identifier_scheme_uri" + ] + self.responsible_party_investigator_affiliation_name = data[ + "responsible_party_investigator_affiliation_name" + ] + self.responsible_party_investigator_affiliation_identifier_scheme = data[ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + self.responsible_party_investigator_affiliation_identifier_value = data[ + "responsible_party_investigator_affiliation_identifier_value" + ] + self.responsible_party_investigator_affiliation_identifier_scheme_uri = data[ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + self.lead_sponsor_name = data["lead_sponsor_name"] + self.lead_sponsor_identifier = data["lead_sponsor_identifier"] + self.lead_sponsor_identifier_scheme = data["lead_sponsor_identifier_scheme"] + self.lead_sponsor_identifier_scheme_uri = data[ + "lead_sponsor_identifier_scheme_uri" + ] + + def validate(self): + """Validates the lead_sponsor_last_name study""" + violations: list = [] + return violations diff --git a/model/study_metadata/study_sponsors_collaborators.py b/model/study_metadata/study_sponsors_collaborators.py deleted file mode 100644 index d4bb84db..00000000 --- a/model/study_metadata/study_sponsors_collaborators.py +++ /dev/null @@ -1,96 +0,0 @@ -from sqlalchemy import String -from sqlalchemy.dialects.postgresql import ARRAY - -from model import Study - -from ..db import db - - -class StudySponsorsCollaborators(db.Model): # type: ignore - """A study is a collection of datasets and participants""" - - def __init__(self, study): - self.study = 
study - self.responsible_party_type = None - self.responsible_party_investigator_name = "" - self.responsible_party_investigator_title = "" - self.responsible_party_investigator_affiliation = "" - self.lead_sponsor_name = "" - self.collaborator_name = [] - - __tablename__ = "study_sponsors_collaborators" - - responsible_party_type = db.Column(db.String, nullable=True) - responsible_party_investigator_name = db.Column(db.String, nullable=False) - responsible_party_investigator_title = db.Column(db.String, nullable=False) - responsible_party_investigator_affiliation = db.Column(db.String, nullable=False) - lead_sponsor_name = db.Column(db.String, nullable=False) - collaborator_name = db.Column(ARRAY(String), nullable=False) - - study_id = db.Column( - db.CHAR(36), - db.ForeignKey("study.id", ondelete="CASCADE"), - primary_key=True, - nullable=False, - ) - study = db.relationship("Study", back_populates="study_sponsors_collaborators") - - def to_dict(self): - """Converts the study to a dictionary""" - return { - "responsible_party_type": self.responsible_party_type, - "responsible_party_investigator_name": self.responsible_party_investigator_name, - "responsible_party_investigator_title": self.responsible_party_investigator_title, - "responsible_party_investigator_affiliation": self.responsible_party_investigator_affiliation, - "lead_sponsor_name": self.lead_sponsor_name, - } - - def to_dict_metadata(self): - """Converts the study metadata to a dictionary""" - return { - "responsible_party_type": self.responsible_party_type, - "responsible_party_investigator_name": self.responsible_party_investigator_name, - # "collaborator_name": self.collaborator_name, - "lead_sponsor_name": self.lead_sponsor_name, - } - - @staticmethod - def from_data(study: Study, data: dict): - """Creates a new study from a dictionary""" - study_sponsors_collaborators = StudySponsorsCollaborators(study) - study_sponsors_collaborators.update(data) - - return study_sponsors_collaborators - - def 
update(self, data: dict): - """Updates the study from a dictionary""" - self.responsible_party_type = data["responsible_party_type"] - - self.responsible_party_investigator_name = data[ - "responsible_party_investigator_name" - ] - self.responsible_party_investigator_title = data[ - "responsible_party_investigator_title" - ] - self.responsible_party_investigator_affiliation = data[ - "responsible_party_investigator_affiliation" - ] - self.lead_sponsor_name = data["lead_sponsor_name"] - - @staticmethod - def from_data_(study: Study, data: dict): - """Creates a new study from a dictionary""" - study_sponsors_collaborators = StudySponsorsCollaborators(study) - study_sponsors_collaborators.update(data) - - return study_sponsors_collaborators - - def update_collaborators(self, data: dict): - """Updates the study from a dictionary""" - self.collaborator_name = data["collaborator_name"] - self.study.touch() - - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations diff --git a/tests/conftest.py b/tests/conftest.py index cd1ddc1e..96c63dff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,6 +23,9 @@ pytest.global_arm_id = "" pytest.global_available_ipd_id = "" pytest.global_cc_id = "" +pytest.global_collaborators_id = "" +pytest.global_conditions_id = "" +pytest.global_keywords_id = "" pytest.global_identification_id = "" pytest.global_intervention_id = "" pytest.global_link_id = "" @@ -43,6 +46,13 @@ pytest.global_available_ipd_id_editor = "" pytest.global_admin_cc_id_admin = "" pytest.global_editor_cc_id_editor = "" +pytest.global_admin_collaborators_id_admin = "" +pytest.global_editor_collaborators_id_editor = "" +pytest.global_admin_conditions_id_admin = "" +pytest.global_editor_conditions_id_editor = "" +pytest.global_admin_keywords_id_admin = "" +pytest.global_editor_keywords_id_editor = "" + pytest.global_identification_id_admin = "" pytest.global_identification_id_editor = "" 
pytest.global_intervention_id_admin = "" diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index d70988a2..0c4257fc 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -17,6 +17,7 @@ def test_post_study(_logged_in_client): json={ "title": "Study Title", "image": "https://api.dicebear.com/6.x/adventurer/svg", + "acronym": "acronym", }, ) @@ -25,6 +26,7 @@ def test_post_study(_logged_in_client): assert response_data["title"] == "Study Title" assert response_data["image"] == "https://api.dicebear.com/6.x/adventurer/svg" + assert response_data["acronym"] == "acronym" pytest.global_study_id = response_data @@ -92,6 +94,7 @@ def test_update_study(clients): json={ "title": "Study Title Updated", "image": pytest.global_study_id["image"], # type: ignore + "acronym": pytest.global_study_id["acronym"], # type: ignore }, ) @@ -101,6 +104,7 @@ def test_update_study(clients): assert response_data["title"] == "Study Title Updated" assert response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore assert response_data["id"] == pytest.global_study_id["id"] # type: ignore admin_response = _admin_client.put( @@ -108,6 +112,7 @@ def test_update_study(clients): json={ "title": "Admin Study Title", "image": pytest.global_study_id["image"], # type: ignore + "acronym": pytest.global_study_id["acronym"], # type: ignore }, ) @@ -116,6 +121,7 @@ def test_update_study(clients): pytest.global_study_id = admin_response_data assert admin_response_data["title"] == "Admin Study Title" + assert admin_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore @@ -124,6 +130,7 @@ def test_update_study(clients): json={ "title": "Editor Study Title", 
"image": pytest.global_study_id["image"], # type: ignore + "acronym": pytest.global_study_id["acronym"], # type: ignore }, ) @@ -133,6 +140,7 @@ def test_update_study(clients): assert editor_response_data["title"] == "Editor Study Title" assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert editor_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore viewer_response = _viewer_client.put( @@ -140,6 +148,7 @@ def test_update_study(clients): json={ "title": "Viewer Study Title", "image": pytest.global_study_id["image"], # type: ignore + "acronym": pytest.global_study_id["acronym"], # type: ignore }, ) @@ -176,18 +185,22 @@ def test_get_study_by_id(clients): assert response_data["id"] == pytest.global_study_id["id"] # type: ignore assert response_data["title"] == pytest.global_study_id["title"] # type: ignore assert response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert admin_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert admin_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert editor_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert editor_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore assert viewer_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert viewer_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert 
viewer_response_data["image"] == pytest.global_study_id["image"] # type: ignore + assert viewer_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore def test_delete_studies_created(clients): @@ -205,6 +218,7 @@ def test_delete_studies_created(clients): json={ "title": "Delete Me", "image": "https://api.dicebear.com/6.x/adventurer/svg", + "acronym": "acronym", }, ) diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 5791d104..7090108f 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -3026,7 +3026,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -3056,7 +3056,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -3084,7 +3084,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -3110,7 +3110,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -3285,7 +3285,7 @@ def test_delete_dataset_related_identifier_metadata(clients): assert editor_response.status_code == 204 -# ------------------- RIGHTS METADATA ------------------- # +# # ------------------- RIGHTS METADATA ------------------- # def test_post_dataset_rights_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID @@ -3304,6 +3304,7 @@ def test_post_dataset_rights_metadata(clients): { 
"identifier": "Identifier", "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", "rights": "Rights", "uri": "URI", "license_text": "license text", @@ -3319,6 +3320,7 @@ def test_post_dataset_rights_metadata(clients): assert response_data[0]["identifier"] == "Identifier" assert response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" assert response_data[0]["rights"] == "Rights" assert response_data[0]["uri"] == "URI" assert response_data[0]["license_text"] == "license text" @@ -3328,9 +3330,10 @@ def test_post_dataset_rights_metadata(clients): json=[ { "identifier": "Admin Identifier", - "identifier_scheme": "Admin Identifier Scheme", - "rights": "Admin Rights", - "uri": "Admin URI", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", "license_text": "license text", } ], @@ -3343,9 +3346,10 @@ def test_post_dataset_rights_metadata(clients): pytest.global_dataset_rights_id_admin = admin_response_data[0]["id"] assert admin_response_data[0]["identifier"] == "Admin Identifier" - assert admin_response_data[0]["identifier_scheme"] == "Admin Identifier Scheme" - assert admin_response_data[0]["rights"] == "Admin Rights" - assert admin_response_data[0]["uri"] == "Admin URI" + assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" + assert admin_response_data[0]["rights"] == "Rights" + assert admin_response_data[0]["uri"] == "URI" assert admin_response_data[0]["license_text"] == "license text" editor_response = _editor_client.post( @@ -3353,9 +3357,10 @@ def test_post_dataset_rights_metadata(clients): json=[ { "identifier": "Editor Identifier", - "identifier_scheme": "Editor Identifier Scheme", - "rights": "Editor Rights", - "uri": "Editor URI", + "identifier_scheme": "Identifier Scheme", 
+ "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", "license_text": "license text", } ], @@ -3366,9 +3371,10 @@ def test_post_dataset_rights_metadata(clients): pytest.global_dataset_rights_id_editor = editor_response_data[0]["id"] assert editor_response_data[0]["identifier"] == "Editor Identifier" - assert editor_response_data[0]["identifier_scheme"] == "Editor Identifier Scheme" - assert editor_response_data[0]["rights"] == "Editor Rights" - assert editor_response_data[0]["uri"] == "Editor URI" + assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" + assert editor_response_data[0]["rights"] == "Rights" + assert editor_response_data[0]["uri"] == "URI" assert editor_response_data[0]["license_text"] == "license text" viewer_response = _viewer_client.post( @@ -3376,9 +3382,10 @@ def test_post_dataset_rights_metadata(clients): json=[ { "identifier": "Viewer Identifier", - "identifier_scheme": "Viewer Identifier Scheme", - "rights": "Viewer Rights", - "uri": "Viewer URI", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", "license_text": "license text", } ], @@ -3424,66 +3431,86 @@ def test_get_dataset_rights_metadata(clients): assert response_data[0]["identifier"] == "Identifier" assert response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" assert response_data[0]["rights"] == "Rights" assert response_data[0]["uri"] == "URI" assert response_data[0]["license_text"] == "license text" - assert response_data[1]["identifier"] == "Admin Identifier" - assert response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" - assert response_data[1]["rights"] == "Admin Rights" - assert response_data[1]["uri"] == "Admin URI" - assert response_data[1]["license_text"] == "license text" - 
assert response_data[2]["identifier"] == "Editor Identifier" - assert response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" - assert response_data[2]["rights"] == "Editor Rights" - assert response_data[2]["uri"] == "Editor URI" - assert response_data[2]["license_text"] == "license text" assert admin_response_data[0]["identifier"] == "Identifier" assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" assert admin_response_data[0]["rights"] == "Rights" assert admin_response_data[0]["uri"] == "URI" assert admin_response_data[0]["license_text"] == "license text" - assert admin_response_data[1]["identifier"] == "Admin Identifier" - assert admin_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" - assert admin_response_data[1]["rights"] == "Admin Rights" - assert admin_response_data[1]["uri"] == "Admin URI" - assert admin_response_data[1]["license_text"] == "license text" - assert admin_response_data[2]["identifier"] == "Editor Identifier" - assert admin_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" - assert admin_response_data[2]["rights"] == "Editor Rights" - assert admin_response_data[2]["uri"] == "Editor URI" - assert admin_response_data[2]["license_text"] == "license text" assert editor_response_data[0]["identifier"] == "Identifier" assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" assert editor_response_data[0]["rights"] == "Rights" assert editor_response_data[0]["uri"] == "URI" assert editor_response_data[0]["license_text"] == "license text" + + assert response_data[1]["identifier"] == "Admin Identifier" + assert response_data[1]["identifier_scheme"] == "Identifier Scheme" + assert response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data[1]["rights"] == "Rights" + assert 
response_data[1]["uri"] == "URI" + assert response_data[1]["license_text"] == "license text" + + assert admin_response_data[1]["identifier"] == "Admin Identifier" + assert admin_response_data[1]["identifier_scheme"] == "Identifier Scheme" + assert admin_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" + assert admin_response_data[1]["rights"] == "Rights" + assert admin_response_data[1]["uri"] == "URI" + assert admin_response_data[1]["license_text"] == "license text" + assert editor_response_data[1]["identifier"] == "Admin Identifier" - assert editor_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" - assert editor_response_data[1]["rights"] == "Admin Rights" - assert editor_response_data[1]["uri"] == "Admin URI" + assert editor_response_data[1]["identifier_scheme"] == "Identifier Scheme" + assert editor_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" + assert editor_response_data[1]["rights"] == "Rights" + assert editor_response_data[1]["uri"] == "URI" assert editor_response_data[1]["license_text"] == "license text" + + assert response_data[2]["identifier"] == "Editor Identifier" + assert response_data[2]["identifier_scheme"] == "Identifier Scheme" + assert response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data[2]["rights"] == "Rights" + assert response_data[2]["uri"] == "URI" + assert response_data[2]["license_text"] == "license text" + + assert admin_response_data[2]["identifier"] == "Editor Identifier" + assert admin_response_data[2]["identifier_scheme"] == "Identifier Scheme" + assert admin_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" + assert admin_response_data[2]["rights"] == "Rights" + assert admin_response_data[2]["uri"] == "URI" + assert admin_response_data[2]["license_text"] == "license text" + assert editor_response_data[2]["identifier"] == "Editor Identifier" - assert editor_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" - assert 
editor_response_data[2]["rights"] == "Editor Rights" - assert editor_response_data[2]["uri"] == "Editor URI" + assert editor_response_data[2]["identifier_scheme"] == "Identifier Scheme" + assert editor_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" + assert editor_response_data[2]["rights"] == "Rights" + assert editor_response_data[2]["uri"] == "URI" assert editor_response_data[2]["license_text"] == "license text" assert viewer_response_data[0]["identifier"] == "Identifier" assert viewer_response_data[0]["identifier_scheme"] == "Identifier Scheme" + assert viewer_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" assert viewer_response_data[0]["rights"] == "Rights" assert viewer_response_data[0]["uri"] == "URI" assert viewer_response_data[0]["license_text"] == "license text" + assert viewer_response_data[1]["identifier"] == "Admin Identifier" - assert viewer_response_data[1]["identifier_scheme"] == "Admin Identifier Scheme" - assert viewer_response_data[1]["rights"] == "Admin Rights" - assert viewer_response_data[1]["uri"] == "Admin URI" + assert viewer_response_data[1]["identifier_scheme"] == "Identifier Scheme" + assert viewer_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" + assert viewer_response_data[1]["rights"] == "Rights" + assert viewer_response_data[1]["uri"] == "URI" assert viewer_response_data[1]["license_text"] == "license text" + assert viewer_response_data[2]["identifier"] == "Editor Identifier" - assert viewer_response_data[2]["identifier_scheme"] == "Editor Identifier Scheme" - assert viewer_response_data[2]["rights"] == "Editor Rights" - assert viewer_response_data[2]["uri"] == "Editor URI" + assert viewer_response_data[2]["identifier_scheme"] == "Identifier Scheme" + assert viewer_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" + assert viewer_response_data[2]["rights"] == "Rights" + assert viewer_response_data[2]["uri"] == "URI" assert viewer_response_data[2]["license_text"] == 
"license text" diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index 848b13f0..8a9623ea 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -247,206 +247,6 @@ def test_delete_arm_metadata(clients): assert editor_response.status_code == 204 -# ------------------- IPD METADATA ------------------- # -def test_post_available_ipd_metadata(clients): - """ - GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (POST) - THEN check that the response is vaild and new IPD was created - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/available-ipd", - json=[ - { - "identifier": "identifier1", - "type": "Clinical Study Report", - "url": "google.com", - "comment": "comment1", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_available_ipd_id = response_data[0]["id"] - - assert response_data[0]["identifier"] == "identifier1" - assert response_data[0]["type"] == "Clinical Study Report" - assert response_data[0]["url"] == "google.com" - assert response_data[0]["comment"] == "comment1" - - admin_response = _admin_client.post( - f"/study/{study_id}/metadata/available-ipd", - json=[ - { - "identifier": "identifier2", - "type": "Clinical Study Report", - "url": "google.com", - "comment": "comment2", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_available_ipd_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == 
"identifier2" - assert admin_response_data[0]["type"] == "Clinical Study Report" - assert admin_response_data[0]["url"] == "google.com" - assert admin_response_data[0]["comment"] == "comment2" - - editor_response = _editor_client.post( - f"/study/{study_id}/metadata/available-ipd", - json=[ - { - "identifier": "identifier3", - "type": "Clinical Study Report", - "url": "google.com", - "comment": "comment3", - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_available_ipd_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["identifier"] == "identifier3" - assert editor_response_data[0]["type"] == "Clinical Study Report" - assert editor_response_data[0]["url"] == "google.com" - assert editor_response_data[0]["comment"] == "comment3" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/available-ipd", - json=[ - { - "identifier": "identifier4", - "type": "Clinical Study Report", - "url": "google.com", - "comment": "comment4", - } - ], - ) - - assert viewer_response.status_code == 403 - - -def test_get_available_ipd_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (GET) - THEN check that the response is vaild and retrieves the available IPD(s) - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/available-ipd") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/available-ipd") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/available-ipd") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/available-ipd") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 
- assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data[0]["identifier"] == "identifier1" - assert response_data[0]["type"] == "Clinical Study Report" - assert response_data[0]["url"] == "google.com" - assert response_data[0]["comment"] == "comment1" - assert response_data[1]["identifier"] == "identifier2" - assert response_data[1]["type"] == "Clinical Study Report" - assert response_data[1]["url"] == "google.com" - assert response_data[1]["comment"] == "comment2" - assert response_data[2]["identifier"] == "identifier3" - assert response_data[2]["type"] == "Clinical Study Report" - assert response_data[2]["url"] == "google.com" - assert response_data[2]["comment"] == "comment3" - - assert admin_response_data[0]["identifier"] == "identifier1" - assert admin_response_data[0]["type"] == "Clinical Study Report" - assert admin_response_data[0]["url"] == "google.com" - assert admin_response_data[0]["comment"] == "comment1" - assert admin_response_data[1]["identifier"] == "identifier2" - assert admin_response_data[1]["type"] == "Clinical Study Report" - assert admin_response_data[1]["url"] == "google.com" - assert admin_response_data[1]["comment"] == "comment2" - assert admin_response_data[2]["identifier"] == "identifier3" - assert admin_response_data[2]["type"] == "Clinical Study Report" - assert admin_response_data[2]["url"] == "google.com" - assert admin_response_data[2]["comment"] == "comment3" - - assert editor_response_data[0]["identifier"] == "identifier1" - assert editor_response_data[0]["type"] == "Clinical Study Report" - assert editor_response_data[0]["url"] == "google.com" - assert editor_response_data[0]["comment"] == "comment1" - assert editor_response_data[1]["identifier"] == "identifier2" - assert editor_response_data[1]["type"] 
== "Clinical Study Report" - assert editor_response_data[1]["url"] == "google.com" - assert editor_response_data[1]["comment"] == "comment2" - assert editor_response_data[2]["identifier"] == "identifier3" - assert editor_response_data[2]["type"] == "Clinical Study Report" - assert editor_response_data[2]["url"] == "google.com" - assert editor_response_data[2]["comment"] == "comment3" - - assert viewer_response_data[0]["identifier"] == "identifier1" - assert viewer_response_data[0]["type"] == "Clinical Study Report" - assert viewer_response_data[0]["url"] == "google.com" - assert viewer_response_data[0]["comment"] == "comment1" - assert viewer_response_data[1]["identifier"] == "identifier2" - assert viewer_response_data[1]["type"] == "Clinical Study Report" - assert viewer_response_data[1]["url"] == "google.com" - assert viewer_response_data[1]["comment"] == "comment2" - assert viewer_response_data[2]["identifier"] == "identifier3" - assert viewer_response_data[2]["type"] == "Clinical Study Report" - assert viewer_response_data[2]["url"] == "google.com" - assert viewer_response_data[2]["comment"] == "comment3" - - -def test_delete_available_ipd_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and available IPD ID - WHEN the '/study/{study_id}/metadata/available-id' endpoint is requested (DELETE) - THEN check that the response is vaild and deletes the available IPD - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - available_ipd_id = pytest.global_available_ipd_id - admin_avail_ipd = pytest.global_available_ipd_id_admin - editor_avail_ipd = pytest.global_available_ipd_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/available-ipd/{available_ipd_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/metadata/available-ipd/{available_ipd_id}" - ) - admin_response = _admin_client.delete( 
- f"/study/{study_id}/metadata/available-ipd/{admin_avail_ipd}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/available-ipd/{editor_avail_ipd}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - # ------------------- CENTRAL CONTACT METADATA ------------------- # def test_post_cc_metadata(clients): """ @@ -461,12 +261,19 @@ def test_post_cc_metadata(clients): f"/study/{study_id}/metadata/central-contact", json=[ { - "name": "central-contact", "affiliation": "affiliation", - "role": "role", "phone": "808", "phone_ext": "909", "email_address": "sample@gmail.com", + "first_name": "central-contact", + "last_name": "central-contact", + "degree": "degree", + "identifier": "central-contact", + "identifier_scheme": "id", + "identifier_scheme_uri": "uri", + "affiliation_identifier": "affiliation identifier", + "affiliation_identifier_scheme": "affiliation identifier scheme", + "affiliation_identifier_scheme_uri": "affiliation identifier scheme uri", } ], ) @@ -477,24 +284,43 @@ def test_post_cc_metadata(clients): response_data = json.loads(response.data) pytest.global_cc_id = response_data[0]["id"] - assert response_data[0]["name"] == "central-contact" assert response_data[0]["affiliation"] == "affiliation" - assert response_data[0]["role"] is None assert response_data[0]["phone"] == "808" assert response_data[0]["phone_ext"] == "909" assert response_data[0]["email_address"] == "sample@gmail.com" - assert response_data[0]["central_contact"] is True + assert response_data[0]["first_name"] == "central-contact" + assert response_data[0]["last_name"] == "central-contact" + assert response_data[0]["degree"] == "degree" + assert response_data[0]["identifier"] == "central-contact" + assert response_data[0]["identifier_scheme"] == "id" + assert response_data[0]["identifier_scheme_uri"] == "uri" + assert 
response_data[0]["affiliation_identifier"] == "affiliation identifier" + assert ( + response_data[0]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + response_data[0]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) admin_response = _admin_client.post( f"/study/{study_id}/metadata/central-contact", json=[ { - "name": "admin-central-contact", "affiliation": "affiliation", - "role": "role", "phone": "808", "phone_ext": "909", - "email_address": "sample1@gmail.com", + "email_address": "sample@gmail.com", + "first_name": "admin-central-contact", + "last_name": "central-contact", + "degree": "degree", + "identifier": "central-contact", + "identifier_scheme": "id", + "identifier_scheme_uri": "uri", + "affiliation_identifier": "affiliation identifier", + "affiliation_identifier_scheme": "affiliation identifier scheme", + "affiliation_identifier_scheme_uri": "affiliation identifier scheme uri", } ], ) @@ -505,24 +331,43 @@ def test_post_cc_metadata(clients): admin_response_data = json.loads(admin_response.data) pytest.global_admin_cc_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0]["name"] == "admin-central-contact" assert admin_response_data[0]["affiliation"] == "affiliation" - assert admin_response_data[0]["role"] is None assert admin_response_data[0]["phone"] == "808" assert admin_response_data[0]["phone_ext"] == "909" - assert admin_response_data[0]["email_address"] == "sample1@gmail.com" - assert admin_response_data[0]["central_contact"] is True + assert admin_response_data[0]["email_address"] == "sample@gmail.com" + assert admin_response_data[0]["first_name"] == "admin-central-contact" + assert admin_response_data[0]["last_name"] == "central-contact" + assert admin_response_data[0]["degree"] == "degree" + assert admin_response_data[0]["identifier"] == "central-contact" + assert admin_response_data[0]["identifier_scheme"] == "id" + assert 
admin_response_data[0]["identifier_scheme_uri"] == "uri" + assert admin_response_data[0]["affiliation_identifier"] == "affiliation identifier" + assert ( + admin_response_data[0]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + admin_response_data[0]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) editor_response = _editor_client.post( f"/study/{study_id}/metadata/central-contact", json=[ { - "name": "editor-central-contact", "affiliation": "affiliation", - "role": "role", "phone": "808", "phone_ext": "909", - "email_address": "sample2@gmail.com", + "email_address": "sample@gmail.com", + "first_name": "editor-central-contact", + "last_name": "central-contact", + "degree": "degree", + "identifier": "central-contact", + "identifier_scheme": "id", + "identifier_scheme_uri": "uri", + "affiliation_identifier": "affiliation identifier", + "affiliation_identifier_scheme": "affiliation identifier scheme", + "affiliation_identifier_scheme_uri": "affiliation identifier scheme uri", } ], ) @@ -531,13 +376,25 @@ def test_post_cc_metadata(clients): editor_response_data = json.loads(editor_response.data) pytest.global_editor_cc_id_editor = editor_response_data[0]["id"] - assert editor_response_data[0]["name"] == "editor-central-contact" assert editor_response_data[0]["affiliation"] == "affiliation" - assert editor_response_data[0]["role"] is None assert editor_response_data[0]["phone"] == "808" assert editor_response_data[0]["phone_ext"] == "909" - assert editor_response_data[0]["email_address"] == "sample2@gmail.com" - assert editor_response_data[0]["central_contact"] is True + assert editor_response_data[0]["email_address"] == "sample@gmail.com" + assert editor_response_data[0]["first_name"] == "editor-central-contact" + assert editor_response_data[0]["last_name"] == "central-contact" + assert editor_response_data[0]["degree"] == "degree" + assert editor_response_data[0]["identifier"] == "central-contact" 
+ assert editor_response_data[0]["identifier_scheme"] == "id" + assert editor_response_data[0]["identifier_scheme_uri"] == "uri" + assert editor_response_data[0]["affiliation_identifier"] == "affiliation identifier" + assert ( + editor_response_data[0]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + editor_response_data[0]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) def test_get_cc_metadata(clients): @@ -564,37 +421,245 @@ def test_get_cc_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["name"] == "central-contact" assert response_data[0]["affiliation"] == "affiliation" - assert response_data[0]["role"] is None assert response_data[0]["phone"] == "808" assert response_data[0]["phone_ext"] == "909" assert response_data[0]["email_address"] == "sample@gmail.com" - assert response_data[0]["central_contact"] is True + assert response_data[0]["first_name"] == "central-contact" + assert response_data[0]["last_name"] == "central-contact" + assert response_data[0]["degree"] == "degree" + assert response_data[0]["identifier"] == "central-contact" + assert response_data[0]["identifier_scheme"] == "id" + assert response_data[0]["identifier_scheme_uri"] == "uri" + assert response_data[0]["affiliation_identifier"] == "affiliation identifier" + assert ( + response_data[0]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + response_data[0]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) - assert admin_response_data[0]["name"] == "central-contact" assert admin_response_data[0]["affiliation"] == "affiliation" - assert admin_response_data[0]["role"] is None assert admin_response_data[0]["phone"] == "808" assert admin_response_data[0]["phone_ext"] == "909" assert admin_response_data[0]["email_address"] == "sample@gmail.com" - assert 
admin_response_data[0]["central_contact"] is True + assert admin_response_data[0]["first_name"] == "central-contact" + assert admin_response_data[0]["last_name"] == "central-contact" + assert admin_response_data[0]["degree"] == "degree" + assert admin_response_data[0]["identifier"] == "central-contact" + assert admin_response_data[0]["identifier_scheme"] == "id" + assert admin_response_data[0]["identifier_scheme_uri"] == "uri" + assert admin_response_data[0]["affiliation_identifier"] == "affiliation identifier" + assert ( + admin_response_data[0]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + admin_response_data[0]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) - assert editor_response_data[0]["name"] == "central-contact" assert editor_response_data[0]["affiliation"] == "affiliation" - assert editor_response_data[0]["role"] is None assert editor_response_data[0]["phone"] == "808" assert editor_response_data[0]["phone_ext"] == "909" assert editor_response_data[0]["email_address"] == "sample@gmail.com" - assert editor_response_data[0]["central_contact"] is True + assert editor_response_data[0]["first_name"] == "central-contact" + assert editor_response_data[0]["last_name"] == "central-contact" + assert editor_response_data[0]["degree"] == "degree" + assert editor_response_data[0]["identifier"] == "central-contact" + assert editor_response_data[0]["identifier_scheme"] == "id" + assert editor_response_data[0]["identifier_scheme_uri"] == "uri" + assert editor_response_data[0]["affiliation_identifier"] == "affiliation identifier" + assert ( + editor_response_data[0]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + editor_response_data[0]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) - assert viewer_response_data[0]["name"] == "central-contact" assert viewer_response_data[0]["affiliation"] == "affiliation" - assert 
viewer_response_data[0]["role"] is None assert viewer_response_data[0]["phone"] == "808" assert viewer_response_data[0]["phone_ext"] == "909" assert viewer_response_data[0]["email_address"] == "sample@gmail.com" - assert viewer_response_data[0]["central_contact"] is True + assert viewer_response_data[0]["first_name"] == "central-contact" + assert viewer_response_data[0]["last_name"] == "central-contact" + assert viewer_response_data[0]["degree"] == "degree" + assert viewer_response_data[0]["identifier"] == "central-contact" + assert viewer_response_data[0]["identifier_scheme"] == "id" + assert viewer_response_data[0]["identifier_scheme_uri"] == "uri" + assert viewer_response_data[0]["affiliation_identifier"] == "affiliation identifier" + assert ( + viewer_response_data[0]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + viewer_response_data[0]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert response_data[1]["affiliation"] == "affiliation" + assert response_data[1]["phone"] == "808" + assert response_data[1]["phone_ext"] == "909" + assert response_data[1]["email_address"] == "sample@gmail.com" + assert response_data[1]["first_name"] == "admin-central-contact" + assert response_data[1]["last_name"] == "central-contact" + assert response_data[1]["degree"] == "degree" + assert response_data[1]["identifier"] == "central-contact" + assert response_data[1]["identifier_scheme"] == "id" + assert response_data[1]["identifier_scheme_uri"] == "uri" + assert response_data[1]["affiliation_identifier"] == "affiliation identifier" + assert ( + response_data[1]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + response_data[1]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert admin_response_data[1]["affiliation"] == "affiliation" + assert admin_response_data[1]["phone"] == "808" + assert 
admin_response_data[1]["phone_ext"] == "909" + assert admin_response_data[1]["email_address"] == "sample@gmail.com" + assert admin_response_data[1]["first_name"] == "admin-central-contact" + assert admin_response_data[1]["last_name"] == "central-contact" + assert admin_response_data[1]["degree"] == "degree" + assert admin_response_data[1]["identifier"] == "central-contact" + assert admin_response_data[1]["identifier_scheme"] == "id" + assert admin_response_data[1]["identifier_scheme_uri"] == "uri" + assert admin_response_data[1]["affiliation_identifier"] == "affiliation identifier" + assert ( + admin_response_data[1]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + admin_response_data[1]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert editor_response_data[1]["affiliation"] == "affiliation" + assert editor_response_data[1]["phone"] == "808" + assert editor_response_data[1]["phone_ext"] == "909" + assert editor_response_data[1]["email_address"] == "sample@gmail.com" + assert editor_response_data[1]["first_name"] == "admin-central-contact" + assert editor_response_data[1]["last_name"] == "central-contact" + assert editor_response_data[1]["degree"] == "degree" + assert editor_response_data[1]["identifier"] == "central-contact" + assert editor_response_data[1]["identifier_scheme"] == "id" + assert editor_response_data[1]["identifier_scheme_uri"] == "uri" + assert editor_response_data[1]["affiliation_identifier"] == "affiliation identifier" + assert ( + editor_response_data[1]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + editor_response_data[1]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert viewer_response_data[1]["affiliation"] == "affiliation" + assert viewer_response_data[1]["phone"] == "808" + assert viewer_response_data[1]["phone_ext"] == "909" + assert viewer_response_data[1]["email_address"] == 
"sample@gmail.com" + assert viewer_response_data[1]["first_name"] == "admin-central-contact" + assert viewer_response_data[1]["last_name"] == "central-contact" + assert viewer_response_data[1]["degree"] == "degree" + assert viewer_response_data[1]["identifier"] == "central-contact" + assert viewer_response_data[1]["identifier_scheme"] == "id" + assert viewer_response_data[1]["identifier_scheme_uri"] == "uri" + assert viewer_response_data[1]["affiliation_identifier"] == "affiliation identifier" + assert ( + viewer_response_data[1]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + viewer_response_data[1]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert response_data[2]["affiliation"] == "affiliation" + assert response_data[2]["phone"] == "808" + assert response_data[2]["phone_ext"] == "909" + assert response_data[2]["email_address"] == "sample@gmail.com" + assert response_data[2]["first_name"] == "editor-central-contact" + assert response_data[2]["last_name"] == "central-contact" + assert response_data[2]["degree"] == "degree" + assert response_data[2]["identifier"] == "central-contact" + assert response_data[2]["identifier_scheme"] == "id" + assert response_data[2]["identifier_scheme_uri"] == "uri" + assert response_data[2]["affiliation_identifier"] == "affiliation identifier" + assert ( + response_data[2]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + response_data[2]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert admin_response_data[2]["affiliation"] == "affiliation" + assert admin_response_data[2]["phone"] == "808" + assert admin_response_data[2]["phone_ext"] == "909" + assert admin_response_data[2]["email_address"] == "sample@gmail.com" + assert admin_response_data[2]["first_name"] == "editor-central-contact" + assert admin_response_data[2]["last_name"] == "central-contact" + assert 
admin_response_data[2]["degree"] == "degree" + assert admin_response_data[2]["identifier"] == "central-contact" + assert admin_response_data[2]["identifier_scheme"] == "id" + assert admin_response_data[2]["identifier_scheme_uri"] == "uri" + assert admin_response_data[2]["affiliation_identifier"] == "affiliation identifier" + assert ( + admin_response_data[2]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + admin_response_data[2]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert editor_response_data[2]["affiliation"] == "affiliation" + assert editor_response_data[2]["phone"] == "808" + assert editor_response_data[2]["phone_ext"] == "909" + assert editor_response_data[2]["email_address"] == "sample@gmail.com" + assert editor_response_data[2]["first_name"] == "editor-central-contact" + assert editor_response_data[2]["last_name"] == "central-contact" + assert editor_response_data[2]["degree"] == "degree" + assert editor_response_data[2]["identifier"] == "central-contact" + assert editor_response_data[2]["identifier_scheme"] == "id" + assert editor_response_data[2]["identifier_scheme_uri"] == "uri" + assert editor_response_data[2]["affiliation_identifier"] == "affiliation identifier" + assert ( + editor_response_data[2]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + editor_response_data[2]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) + + assert viewer_response_data[2]["affiliation"] == "affiliation" + assert viewer_response_data[2]["phone"] == "808" + assert viewer_response_data[2]["phone_ext"] == "909" + assert viewer_response_data[2]["email_address"] == "sample@gmail.com" + assert viewer_response_data[2]["first_name"] == "editor-central-contact" + assert viewer_response_data[2]["last_name"] == "central-contact" + assert viewer_response_data[2]["degree"] == "degree" + assert viewer_response_data[2]["identifier"] 
== "central-contact" + assert viewer_response_data[2]["identifier_scheme"] == "id" + assert viewer_response_data[2]["identifier_scheme_uri"] == "uri" + assert viewer_response_data[2]["affiliation_identifier"] == "affiliation identifier" + assert ( + viewer_response_data[2]["affiliation_identifier_scheme"] + == "affiliation identifier scheme" + ) + assert ( + viewer_response_data[2]["affiliation_identifier_scheme_uri"] + == "affiliation identifier scheme uri" + ) def test_delete_cc_metadata(clients): @@ -631,7 +696,7 @@ def test_delete_cc_metadata(clients): # ------------------- COLLABORATORS METADATA ------------------- # -def test_put_collaborators_metadata(clients): +def test_post_collaborators_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/collaborators' @@ -641,46 +706,82 @@ def test_put_collaborators_metadata(clients): _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.put( + response = _logged_in_client.post( f"/study/{study_id}/metadata/collaborators", json=[ - "collaborator1123", + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) + pytest.global_collaborators_id = response_data[0]["id"] - assert response_data[0] == "collaborator1123" + assert response_data[0]["name"] == "collaborator1123" + assert response_data[0]["identifier"] == "collaborator1123" + assert response_data[0]["scheme"] == "collaborator1123" + assert response_data[0]["scheme_uri"] == "collaborator1123" - admin_response = _admin_client.put( + admin_response = _admin_client.post( 
f"/study/{study_id}/metadata/collaborators", json=[ - "admin-collaborator1123", + { + "name": "admin collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + } ], ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) - assert admin_response.status_code == 200 + assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) + pytest.global_admin_collaborators_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0] == "admin-collaborator1123" + assert admin_response_data[0]["name"] == "admin collaborator1123" + assert admin_response_data[0]["identifier"] == "collaborator1123" + assert admin_response_data[0]["scheme"] == "collaborator1123" + assert admin_response_data[0]["scheme_uri"] == "collaborator1123" - editor_response = _editor_client.put( + editor_response = _editor_client.post( f"/study/{study_id}/metadata/collaborators", json=[ - "editor-collaborator1123", + { + "name": "editor collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + } ], ) - assert editor_response.status_code == 200 + assert editor_response.status_code == 201 editor_response_data = json.loads(editor_response.data) + pytest.global_editor_collaborators_id_editor = editor_response_data[0]["id"] - assert editor_response_data[0] == "editor-collaborator1123" + assert editor_response_data[0]["name"] == "editor collaborator1123" + assert editor_response_data[0]["identifier"] == "collaborator1123" + assert editor_response_data[0]["scheme"] == "collaborator1123" + assert editor_response_data[0]["scheme_uri"] == "collaborator1123" - viewer_response = _viewer_client.put( + viewer_response = _viewer_client.post( f"/study/{study_id}/metadata/collaborators", json=[ - "viewer-collaborator1123", + { + "name": "editor collaborator1123", + "identifier": 
"collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + } ], ) @@ -711,14 +812,100 @@ def test_get_collaborators_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0] == "editor-collaborator1123" - assert admin_response_data[0] == "editor-collaborator1123" - assert editor_response_data[0] == "editor-collaborator1123" - assert viewer_response_data[0] == "editor-collaborator1123" + assert response_data[0]["name"] == "collaborator1123" + assert response_data[0]["identifier"] == "collaborator1123" + assert response_data[0]["scheme"] == "collaborator1123" + assert response_data[0]["scheme_uri"] == "collaborator1123" + + assert admin_response_data[0]["name"] == "collaborator1123" + assert admin_response_data[0]["identifier"] == "collaborator1123" + assert admin_response_data[0]["scheme"] == "collaborator1123" + assert admin_response_data[0]["scheme_uri"] == "collaborator1123" + + assert editor_response_data[0]["name"] == "collaborator1123" + assert editor_response_data[0]["identifier"] == "collaborator1123" + assert editor_response_data[0]["scheme"] == "collaborator1123" + assert editor_response_data[0]["scheme_uri"] == "collaborator1123" + + assert viewer_response_data[0]["name"] == "collaborator1123" + assert viewer_response_data[0]["identifier"] == "collaborator1123" + assert viewer_response_data[0]["scheme"] == "collaborator1123" + assert viewer_response_data[0]["scheme_uri"] == "collaborator1123" + + assert response_data[1]["name"] == "admin collaborator1123" + assert response_data[1]["identifier"] == "collaborator1123" + assert response_data[1]["scheme"] == "collaborator1123" + assert response_data[1]["scheme_uri"] == "collaborator1123" + + assert admin_response_data[1]["name"] == "admin collaborator1123" + assert admin_response_data[1]["identifier"] == "collaborator1123" + assert 
admin_response_data[1]["scheme"] == "collaborator1123" + assert admin_response_data[1]["scheme_uri"] == "collaborator1123" + + assert editor_response_data[1]["name"] == "admin collaborator1123" + assert editor_response_data[1]["identifier"] == "collaborator1123" + assert editor_response_data[1]["scheme"] == "collaborator1123" + assert editor_response_data[1]["scheme_uri"] == "collaborator1123" + + assert viewer_response_data[1]["name"] == "admin collaborator1123" + assert viewer_response_data[1]["identifier"] == "collaborator1123" + assert viewer_response_data[1]["scheme"] == "collaborator1123" + assert viewer_response_data[1]["scheme_uri"] == "collaborator1123" + + assert response_data[2]["name"] == "editor collaborator1123" + assert response_data[2]["identifier"] == "collaborator1123" + assert response_data[2]["scheme"] == "collaborator1123" + assert response_data[2]["scheme_uri"] == "collaborator1123" + + assert admin_response_data[2]["name"] == "editor collaborator1123" + assert admin_response_data[2]["identifier"] == "collaborator1123" + assert admin_response_data[2]["scheme"] == "collaborator1123" + assert admin_response_data[2]["scheme_uri"] == "collaborator1123" + + assert editor_response_data[2]["name"] == "editor collaborator1123" + assert editor_response_data[2]["identifier"] == "collaborator1123" + assert editor_response_data[2]["scheme"] == "collaborator1123" + assert editor_response_data[2]["scheme_uri"] == "collaborator1123" + + assert viewer_response_data[2]["name"] == "editor collaborator1123" + assert viewer_response_data[2]["identifier"] == "collaborator1123" + assert viewer_response_data[2]["scheme"] == "collaborator1123" + assert viewer_response_data[2]["scheme_uri"] == "collaborator1123" + + +def test_delete_collaborators_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (GET) + THEN check that the response is valid and 
retrieves the identification metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + collaborators_id = pytest.global_collaborators_id + admin_collaborators_id = pytest.global_admin_collaborators_id_admin + editor_collaborators_id = pytest.global_editor_collaborators_id_editor + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/collaborators/{collaborators_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/collaborators/{collaborators_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/collaborators/{admin_collaborators_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/collaborators/{editor_collaborators_id}" + ) -# ------------------- CONDITIONS METADATA ------------------- # -def test_put_conditions_metadata(clients): + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# # ------------------- CONDITIONS METADATA ------------------- # +def test_post_conditions_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (POST) @@ -727,59 +914,89 @@ def test_put_conditions_metadata(clients): _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.put( + response = _logged_in_client.post( f"/study/{study_id}/metadata/conditions", json=[ - "true", - "conditions string", - "keywords string", - "size string", + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } ], ) - assert response.status_code == 200 + assert response.status_code == 201 
response_data = json.loads(response.data) + pytest.global_conditions_id = response_data[0]["id"] - assert response_data[0] == "true" - assert response_data[1] == "conditions string" - assert response_data[2] == "keywords string" - assert response_data[3] == "size string" + assert response_data[0]["name"] == "condition" + assert response_data[0]["classification_code"] == "classification code" + assert response_data[0]["scheme"] == "scheme" + assert response_data[0]["scheme_uri"] == "scheme uri" + assert response_data[0]["condition_uri"] == "condition" - admin_response = _admin_client.put( + admin_response = _admin_client.post( f"/study/{study_id}/metadata/conditions", json=[ - "true", - "admin-conditions string", - "admin-keywords string", - "admin-size string", + { + "name": "admin condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } ], ) - assert admin_response.status_code == 200 + assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) + pytest.global_admin_conditions_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0] == "true" - assert admin_response_data[1] == "admin-conditions string" - assert admin_response_data[2] == "admin-keywords string" - assert admin_response_data[3] == "admin-size string" + assert admin_response_data[0]["name"] == "admin condition" + assert admin_response_data[0]["classification_code"] == "classification code" + assert admin_response_data[0]["scheme"] == "scheme" + assert admin_response_data[0]["scheme_uri"] == "scheme uri" + assert admin_response_data[0]["condition_uri"] == "condition" - editor_response = _editor_client.put( + editor_response = _editor_client.post( f"/study/{study_id}/metadata/conditions", json=[ - "true", - "editor-conditions string", - "editor-keywords string", - "editor-size string", + { + "name": "editor condition", + "classification_code": "classification 
code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } ], ) - assert editor_response.status_code == 200 + assert editor_response.status_code == 201 editor_response_data = json.loads(editor_response.data) + pytest.global_editor_conditions_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["name"] == "editor condition" + assert editor_response_data[0]["classification_code"] == "classification code" + assert editor_response_data[0]["scheme"] == "scheme" + assert editor_response_data[0]["scheme_uri"] == "scheme uri" + assert editor_response_data[0]["condition_uri"] == "condition" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/conditions", + json=[ + { + "name": "editor condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + ) - assert editor_response_data[0] == "true" - assert editor_response_data[1] == "editor-conditions string" - assert editor_response_data[2] == "editor-keywords string" - assert editor_response_data[3] == "editor-size string" + assert viewer_response.status_code == 403 def test_get_conditions_metadata(clients): @@ -806,29 +1023,112 @@ def test_get_conditions_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0] == "true" - assert response_data[1] == "editor-conditions string" - assert response_data[2] == "editor-keywords string" - assert response_data[3] == "editor-size string" - - assert admin_response_data[0] == "true" - assert admin_response_data[1] == "editor-conditions string" - assert admin_response_data[2] == "editor-keywords string" - assert admin_response_data[3] == "editor-size string" + assert response_data[0]["name"] == "condition" + assert response_data[0]["classification_code"] == "classification code" + assert response_data[0]["scheme"] == 
"scheme" + assert response_data[0]["scheme_uri"] == "scheme uri" + assert response_data[0]["condition_uri"] == "condition" + + assert admin_response_data[0]["name"] == "condition" + assert admin_response_data[0]["classification_code"] == "classification code" + assert admin_response_data[0]["scheme"] == "scheme" + assert admin_response_data[0]["scheme_uri"] == "scheme uri" + assert admin_response_data[0]["condition_uri"] == "condition" + + assert editor_response_data[0]["name"] == "condition" + assert editor_response_data[0]["classification_code"] == "classification code" + assert editor_response_data[0]["scheme"] == "scheme" + assert editor_response_data[0]["scheme_uri"] == "scheme uri" + assert editor_response_data[0]["condition_uri"] == "condition" + + assert viewer_response_data[0]["name"] == "condition" + assert viewer_response_data[0]["classification_code"] == "classification code" + assert viewer_response_data[0]["scheme"] == "scheme" + assert viewer_response_data[0]["scheme_uri"] == "scheme uri" + assert viewer_response_data[0]["condition_uri"] == "condition" + + assert response_data[1]["name"] == "admin condition" + assert response_data[1]["classification_code"] == "classification code" + assert response_data[1]["scheme"] == "scheme" + assert response_data[1]["scheme_uri"] == "scheme uri" + assert response_data[1]["condition_uri"] == "condition" + + assert admin_response_data[1]["name"] == "admin condition" + assert admin_response_data[1]["classification_code"] == "classification code" + assert admin_response_data[1]["scheme"] == "scheme" + assert admin_response_data[1]["scheme_uri"] == "scheme uri" + assert admin_response_data[1]["condition_uri"] == "condition" + + assert editor_response_data[1]["name"] == "admin condition" + assert editor_response_data[1]["classification_code"] == "classification code" + assert editor_response_data[1]["scheme"] == "scheme" + assert editor_response_data[1]["scheme_uri"] == "scheme uri" + assert 
editor_response_data[1]["condition_uri"] == "condition" + + assert viewer_response_data[1]["name"] == "admin condition" + assert viewer_response_data[1]["classification_code"] == "classification code" + assert viewer_response_data[1]["scheme"] == "scheme" + assert viewer_response_data[1]["scheme_uri"] == "scheme uri" + assert viewer_response_data[1]["condition_uri"] == "condition" + + assert response_data[2]["name"] == "editor condition" + assert response_data[2]["classification_code"] == "classification code" + assert response_data[2]["scheme"] == "scheme" + assert response_data[2]["scheme_uri"] == "scheme uri" + assert response_data[2]["condition_uri"] == "condition" + + assert admin_response_data[2]["name"] == "editor condition" + assert admin_response_data[2]["classification_code"] == "classification code" + assert admin_response_data[2]["scheme"] == "scheme" + assert admin_response_data[2]["scheme_uri"] == "scheme uri" + assert admin_response_data[2]["condition_uri"] == "condition" + + assert editor_response_data[2]["name"] == "editor condition" + assert editor_response_data[2]["classification_code"] == "classification code" + assert editor_response_data[2]["scheme"] == "scheme" + assert editor_response_data[2]["scheme_uri"] == "scheme uri" + assert editor_response_data[2]["condition_uri"] == "condition" + + assert viewer_response_data[2]["name"] == "editor condition" + assert viewer_response_data[2]["classification_code"] == "classification code" + assert viewer_response_data[2]["scheme"] == "scheme" + assert viewer_response_data[2]["scheme_uri"] == "scheme uri" + assert viewer_response_data[2]["condition_uri"] == "condition" + + +def test_delete_conditions_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) + THEN check that the response is valid and retrieves the identification metadata + """ + _logged_in_client, _admin_client, 
_editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + conditions_id = pytest.global_conditions_id + admin_conditions_id = pytest.global_admin_conditions_id_admin + editor_conditions_id = pytest.global_editor_conditions_id_editor - assert editor_response_data[0] == "true" - assert editor_response_data[1] == "editor-conditions string" - assert editor_response_data[2] == "editor-keywords string" - assert editor_response_data[3] == "editor-size string" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/conditions/{conditions_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/conditions/{conditions_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/conditions/{admin_conditions_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/conditions/{editor_conditions_id}" + ) - assert viewer_response_data[0] == "true" - assert viewer_response_data[1] == "editor-conditions string" - assert viewer_response_data[2] == "editor-keywords string" - assert viewer_response_data[3] == "editor-size string" + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- KEYWORDS METADATA ------------------- # -def test_put_keywords_metadata(clients): +def test_post_keywords_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (POST) @@ -837,59 +1137,87 @@ def test_put_keywords_metadata(clients): _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.put( + response = _logged_in_client.post( f"/study/{study_id}/metadata/keywords", json=[ - "true", - "conditions string", - "keywords string", - "size string", + 
{ + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } ], ) - - assert response.status_code == 200 + assert response.status_code == 201 response_data = json.loads(response.data) + pytest.global_keywords_id = response_data[0]["id"] - assert response_data[0] == "true" - assert response_data[1] == "conditions string" - assert response_data[2] == "keywords string" - assert response_data[3] == "size string" + assert response_data[0]["name"] == "keywords" + assert response_data[0]["classification_code"] == "classification code" + assert response_data[0]["scheme"] == "scheme" + assert response_data[0]["scheme_uri"] == "scheme uri" + assert response_data[0]["keyword_uri"] == "keywords" - admin_response = _admin_client.put( + admin_response = _admin_client.post( f"/study/{study_id}/metadata/keywords", json=[ - "true", - "admin-conditions string", - "admin-keywords string", - "admin-size string", + { + "name": "admin keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } ], ) - assert admin_response.status_code == 200 + assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) + pytest.global_admin_keywords_id_admin = admin_response_data[0]["id"] + assert admin_response_data[0]["name"] == "admin keywords" + assert admin_response_data[0]["classification_code"] == "classification code" + assert admin_response_data[0]["scheme"] == "scheme" + assert admin_response_data[0]["scheme_uri"] == "scheme uri" + assert admin_response_data[0]["keyword_uri"] == "keywords" - assert admin_response_data[0] == "true" - assert admin_response_data[1] == "admin-conditions string" - assert admin_response_data[2] == "admin-keywords string" - assert admin_response_data[3] == "admin-size string" - - editor_response = _editor_client.put( + editor_response = _editor_client.post( 
f"/study/{study_id}/metadata/keywords", json=[ - "true", - "editor-conditions string", - "editor-keywords string", - "editor-size string", + { + "name": "editor keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } ], ) - assert editor_response.status_code == 200 + assert editor_response.status_code == 201 editor_response_data = json.loads(editor_response.data) + pytest.global_editor_keywords_id_editor = editor_response_data[0]["id"] + + assert editor_response_data[0]["name"] == "editor keywords" + assert editor_response_data[0]["classification_code"] == "classification code" + assert editor_response_data[0]["scheme"] == "scheme" + assert editor_response_data[0]["scheme_uri"] == "scheme uri" + assert editor_response_data[0]["keyword_uri"] == "keywords" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/metadata/keywords", + json=[ + { + "name": "editor keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + ) - assert editor_response_data[0] == "true" - assert editor_response_data[1] == "editor-conditions string" - assert editor_response_data[2] == "editor-keywords string" - assert editor_response_data[3] == "editor-size string" + assert viewer_response.status_code == 403 def test_get_keywords_metadata(clients): @@ -916,25 +1244,108 @@ def test_get_keywords_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0] == "true" - assert response_data[1] == "editor-conditions string" - assert response_data[2] == "editor-keywords string" - assert response_data[3] == "editor-size string" - - assert admin_response_data[0] == "true" - assert admin_response_data[1] == "editor-conditions string" - assert admin_response_data[2] == "editor-keywords string" - assert 
admin_response_data[3] == "editor-size string" + assert response_data[0]["name"] == "keywords" + assert response_data[0]["classification_code"] == "classification code" + assert response_data[0]["scheme"] == "scheme" + assert response_data[0]["scheme_uri"] == "scheme uri" + assert response_data[0]["keyword_uri"] == "keywords" + + assert admin_response_data[0]["name"] == "keywords" + assert admin_response_data[0]["classification_code"] == "classification code" + assert admin_response_data[0]["scheme"] == "scheme" + assert admin_response_data[0]["scheme_uri"] == "scheme uri" + assert admin_response_data[0]["keyword_uri"] == "keywords" + + assert editor_response_data[0]["name"] == "keywords" + assert editor_response_data[0]["classification_code"] == "classification code" + assert editor_response_data[0]["scheme"] == "scheme" + assert editor_response_data[0]["scheme_uri"] == "scheme uri" + assert editor_response_data[0]["keyword_uri"] == "keywords" + + assert viewer_response_data[0]["name"] == "keywords" + assert viewer_response_data[0]["classification_code"] == "classification code" + assert viewer_response_data[0]["scheme"] == "scheme" + assert viewer_response_data[0]["scheme_uri"] == "scheme uri" + assert viewer_response_data[0]["keyword_uri"] == "keywords" + + assert response_data[1]["name"] == "admin keywords" + assert response_data[1]["classification_code"] == "classification code" + assert response_data[1]["scheme"] == "scheme" + assert response_data[1]["scheme_uri"] == "scheme uri" + assert response_data[1]["keyword_uri"] == "keywords" + + assert admin_response_data[1]["name"] == "admin keywords" + assert admin_response_data[1]["classification_code"] == "classification code" + assert admin_response_data[1]["scheme"] == "scheme" + assert admin_response_data[1]["scheme_uri"] == "scheme uri" + assert admin_response_data[1]["keyword_uri"] == "keywords" + + assert editor_response_data[1]["name"] == "admin keywords" + assert 
editor_response_data[1]["classification_code"] == "classification code" + assert editor_response_data[1]["scheme"] == "scheme" + assert editor_response_data[1]["scheme_uri"] == "scheme uri" + assert editor_response_data[1]["keyword_uri"] == "keywords" + + assert viewer_response_data[1]["name"] == "admin keywords" + assert viewer_response_data[1]["classification_code"] == "classification code" + assert viewer_response_data[1]["scheme"] == "scheme" + assert viewer_response_data[1]["scheme_uri"] == "scheme uri" + assert viewer_response_data[1]["keyword_uri"] == "keywords" + + assert response_data[2]["name"] == "editor keywords" + assert response_data[2]["classification_code"] == "classification code" + assert response_data[2]["scheme"] == "scheme" + assert response_data[2]["scheme_uri"] == "scheme uri" + assert response_data[2]["keyword_uri"] == "keywords" + + assert admin_response_data[2]["name"] == "editor keywords" + assert admin_response_data[2]["classification_code"] == "classification code" + assert admin_response_data[2]["scheme"] == "scheme" + assert admin_response_data[2]["scheme_uri"] == "scheme uri" + assert admin_response_data[2]["keyword_uri"] == "keywords" + + assert editor_response_data[2]["name"] == "editor keywords" + assert editor_response_data[2]["classification_code"] == "classification code" + assert editor_response_data[2]["scheme"] == "scheme" + assert editor_response_data[2]["scheme_uri"] == "scheme uri" + assert editor_response_data[2]["keyword_uri"] == "keywords" + + assert viewer_response_data[2]["name"] == "editor keywords" + assert viewer_response_data[2]["classification_code"] == "classification code" + assert viewer_response_data[2]["scheme"] == "scheme" + assert viewer_response_data[2]["scheme_uri"] == "scheme uri" + assert viewer_response_data[2]["keyword_uri"] == "keywords" + + +def test_delete_keywords_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the 
'/study/{study_id}/metadata/keywords' endpoint is requested (GET) + THEN check that the response is valid and retrieves the identification metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + keywords_id = pytest.global_keywords_id + admin_keywords_id = pytest.global_admin_keywords_id_admin + editor_keywords_id = pytest.global_editor_keywords_id_editor - assert editor_response_data[0] == "true" - assert editor_response_data[1] == "editor-conditions string" - assert editor_response_data[2] == "editor-keywords string" - assert editor_response_data[3] == "editor-size string" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/keywords/{keywords_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/keywords/{keywords_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/keywords/{admin_keywords_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/keywords/{editor_keywords_id}" + ) - assert viewer_response_data[0] == "true" - assert viewer_response_data[1] == "editor-conditions string" - assert viewer_response_data[2] == "editor-keywords string" - assert viewer_response_data[3] == "editor-size string" + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- DESCRIPTION METADATA ------------------- # @@ -1067,7 +1478,7 @@ def test_put_design_metadata(clients): "bio_spec_retention": "None Retained", "bio_spec_description": "dfasdf", "target_duration": "rewrwe", - "number_groups_cohorts": 1, + "is_patient_registry": "yes", }, ) @@ -1094,7 +1505,7 @@ def test_put_design_metadata(clients): assert response_data["bio_spec_retention"] == "None Retained" assert response_data["bio_spec_description"] == "dfasdf" assert response_data["target_duration"] 
== "rewrwe" - assert response_data["number_groups_cohorts"] == 1 + assert response_data["is_patient_registry"] == "yes" admin_response = _admin_client.put( f"/study/{study_id}/metadata/design", @@ -1116,7 +1527,7 @@ def test_put_design_metadata(clients): "bio_spec_retention": "None Retained", "bio_spec_description": "dfasdf", "target_duration": "rewrwe", - "number_groups_cohorts": 1, + "is_patient_registry": "yes", }, ) @@ -1146,7 +1557,7 @@ def test_put_design_metadata(clients): assert admin_response_data["bio_spec_retention"] == "None Retained" assert admin_response_data["bio_spec_description"] == "dfasdf" assert admin_response_data["target_duration"] == "rewrwe" - assert admin_response_data["number_groups_cohorts"] == 1 + assert admin_response_data["is_patient_registry"] == "yes" editor_response = _editor_client.put( f"/study/{study_id}/metadata/design", @@ -1168,7 +1579,7 @@ def test_put_design_metadata(clients): "bio_spec_retention": "None Retained", "bio_spec_description": "dfasdf", "target_duration": "rewrwe", - "number_groups_cohorts": 1, + "is_patient_registry": "yes", }, ) @@ -1198,7 +1609,7 @@ def test_put_design_metadata(clients): assert editor_response_data["bio_spec_retention"] == "None Retained" assert editor_response_data["bio_spec_description"] == "dfasdf" assert editor_response_data["target_duration"] == "rewrwe" - assert editor_response_data["number_groups_cohorts"] == 1 + assert editor_response_data["is_patient_registry"] == "yes" viewer_response = _viewer_client.put( f"/study/{study_id}/metadata/design", @@ -1220,7 +1631,7 @@ def test_put_design_metadata(clients): "bio_spec_retention": "None Retained", "bio_spec_description": "dfasdf", "target_duration": "rewrwe", - "number_groups_cohorts": 1, + "is_patient_registry": "yes", }, ) @@ -1271,7 +1682,7 @@ def test_get_design_metadata(clients): assert response_data["bio_spec_retention"] == "None Retained" assert response_data["bio_spec_description"] == "dfasdf" assert 
response_data["target_duration"] == "rewrwe" - assert response_data["number_groups_cohorts"] == 1 + assert response_data["is_patient_registry"] == "yes" assert admin_response_data["design_allocation"] == "editor-dfasdfasd" assert admin_response_data["study_type"] == "Interventional" @@ -1296,7 +1707,7 @@ def test_get_design_metadata(clients): assert admin_response_data["bio_spec_retention"] == "None Retained" assert admin_response_data["bio_spec_description"] == "dfasdf" assert admin_response_data["target_duration"] == "rewrwe" - assert admin_response_data["number_groups_cohorts"] == 1 + assert admin_response_data["is_patient_registry"] == "yes" assert editor_response_data["design_allocation"] == "editor-dfasdfasd" assert editor_response_data["study_type"] == "Interventional" @@ -1321,7 +1732,7 @@ def test_get_design_metadata(clients): assert editor_response_data["bio_spec_retention"] == "None Retained" assert editor_response_data["bio_spec_description"] == "dfasdf" assert editor_response_data["target_duration"] == "rewrwe" - assert editor_response_data["number_groups_cohorts"] == 1 + assert editor_response_data["is_patient_registry"] == "yes" assert viewer_response_data["design_allocation"] == "editor-dfasdfasd" assert viewer_response_data["study_type"] == "Interventional" @@ -1346,7 +1757,7 @@ def test_get_design_metadata(clients): assert viewer_response_data["bio_spec_retention"] == "None Retained" assert viewer_response_data["bio_spec_description"] == "dfasdf" assert viewer_response_data["target_duration"] == "rewrwe" - assert viewer_response_data["number_groups_cohorts"] == 1 + assert viewer_response_data["is_patient_registry"] == "yes" # ------------------- ELIGIBILITY METADATA ------------------- # @@ -1362,7 +1773,7 @@ def test_put_eligibility_metadata(clients): response = _logged_in_client.put( f"/study/{study_id}/metadata/eligibility", json={ - "gender": "All", + "sex": "All", "gender_based": "Yes", "gender_description": "none", "minimum_age_value": 18, 
@@ -1380,7 +1791,7 @@ def test_put_eligibility_metadata(clients): assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["gender"] == "All" + assert response_data["sex"] == "All" assert response_data["gender_based"] == "Yes" assert response_data["gender_description"] == "none" assert response_data["minimum_age_value"] == 18 @@ -1396,7 +1807,7 @@ def test_put_eligibility_metadata(clients): admin_response = _admin_client.put( f"/study/{study_id}/metadata/eligibility", json={ - "gender": "All", + "sex": "All", "gender_based": "Yes", "gender_description": "admin-none", "minimum_age_value": 18, @@ -1414,7 +1825,7 @@ def test_put_eligibility_metadata(clients): assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - assert admin_response_data["gender"] == "All" + assert admin_response_data["sex"] == "All" assert admin_response_data["gender_based"] == "Yes" assert admin_response_data["gender_description"] == "admin-none" assert admin_response_data["minimum_age_value"] == 18 @@ -1430,7 +1841,7 @@ def test_put_eligibility_metadata(clients): editor_response = _editor_client.put( f"/study/{study_id}/metadata/eligibility", json={ - "gender": "All", + "sex": "All", "gender_based": "Yes", "gender_description": "editor-none", "minimum_age_value": 18, @@ -1448,7 +1859,7 @@ def test_put_eligibility_metadata(clients): assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - assert editor_response_data["gender"] == "All" + assert editor_response_data["sex"] == "All" assert editor_response_data["gender_based"] == "Yes" assert editor_response_data["gender_description"] == "editor-none" assert editor_response_data["minimum_age_value"] == 18 @@ -1464,7 +1875,7 @@ def test_put_eligibility_metadata(clients): viewer_response = _viewer_client.put( f"/study/{study_id}/metadata/eligibility", json={ - "gender": "All", + "sex": "All", "gender_based": "Yes", 
"gender_description": "viewer-none", "minimum_age_value": 18, @@ -1506,7 +1917,7 @@ def test_get_eligibility_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data["gender"] == "All" + assert response_data["sex"] == "All" assert response_data["gender_based"] == "Yes" assert response_data["gender_description"] == "editor-none" assert response_data["minimum_age_value"] == 18 @@ -1519,7 +1930,7 @@ def test_get_eligibility_metadata(clients): assert response_data["study_population"] == "study_population" assert response_data["sampling_method"] == "Probability Sample" - assert admin_response_data["gender"] == "All" + assert admin_response_data["sex"] == "All" assert admin_response_data["gender_based"] == "Yes" assert admin_response_data["gender_description"] == "editor-none" assert admin_response_data["minimum_age_value"] == 18 @@ -1532,7 +1943,7 @@ def test_get_eligibility_metadata(clients): assert admin_response_data["study_population"] == "study_population" assert admin_response_data["sampling_method"] == "Probability Sample" - assert editor_response_data["gender"] == "All" + assert editor_response_data["sex"] == "All" assert editor_response_data["gender_based"] == "Yes" assert editor_response_data["gender_description"] == "editor-none" assert editor_response_data["minimum_age_value"] == 18 @@ -1545,7 +1956,7 @@ def test_get_eligibility_metadata(clients): assert editor_response_data["study_population"] == "study_population" assert editor_response_data["sampling_method"] == "Probability Sample" - assert viewer_response_data["gender"] == "All" + assert viewer_response_data["sex"] == "All" assert viewer_response_data["gender_based"] == "Yes" assert viewer_response_data["gender_description"] == "editor-none" assert viewer_response_data["minimum_age_value"] == 18 @@ -1969,372 +2380,67 @@ def test_get_intervention_metadata(clients): assert 
admin_response_data[1]["arm_group_label_list"] == ["test", "one"] assert admin_response_data[1]["other_name_list"] == ["uhh", "yes"] assert admin_response_data[2]["type"] == "Device" - assert admin_response_data[2]["name"] == "editor-name test" - assert admin_response_data[2]["description"] == "desc" - assert admin_response_data[2]["arm_group_label_list"] == ["test", "one"] - assert admin_response_data[2]["other_name_list"] == ["uhh", "yes"] - - assert editor_response_data[0]["type"] == "Device" - assert editor_response_data[0]["name"] == "name test" - assert editor_response_data[0]["description"] == "desc" - assert editor_response_data[0]["arm_group_label_list"] == ["test", "one"] - assert editor_response_data[0]["other_name_list"] == ["uhh", "yes"] - assert editor_response_data[1]["type"] == "Device" - assert editor_response_data[1]["name"] == "admin-name test" - assert editor_response_data[1]["description"] == "desc" - assert editor_response_data[1]["arm_group_label_list"] == ["test", "one"] - assert editor_response_data[1]["other_name_list"] == ["uhh", "yes"] - assert editor_response_data[2]["type"] == "Device" - assert editor_response_data[2]["name"] == "editor-name test" - assert editor_response_data[2]["description"] == "desc" - assert editor_response_data[2]["arm_group_label_list"] == ["test", "one"] - assert editor_response_data[2]["other_name_list"] == ["uhh", "yes"] - - assert viewer_response_data[0]["type"] == "Device" - assert viewer_response_data[0]["name"] == "name test" - assert viewer_response_data[0]["description"] == "desc" - assert viewer_response_data[0]["arm_group_label_list"] == ["test", "one"] - assert viewer_response_data[0]["other_name_list"] == ["uhh", "yes"] - assert viewer_response_data[1]["type"] == "Device" - assert viewer_response_data[1]["name"] == "admin-name test" - assert viewer_response_data[1]["description"] == "desc" - assert viewer_response_data[1]["arm_group_label_list"] == ["test", "one"] - assert 
viewer_response_data[1]["other_name_list"] == ["uhh", "yes"] - assert viewer_response_data[2]["type"] == "Device" - assert viewer_response_data[2]["name"] == "editor-name test" - assert viewer_response_data[2]["description"] == "desc" - assert viewer_response_data[2]["arm_group_label_list"] == ["test", "one"] - assert viewer_response_data[2]["other_name_list"] == ["uhh", "yes"] - - -def test_delete_intervention_metadata(clients): - """ - Given a Flask application configured for testing, study ID, dataset ID and intervention ID - WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (DELETE) - THEN check that the response is valid and deletes the intervention metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - intervention_id = pytest.global_intervention_id - a_intervention_id = pytest.global_intervention_id_admin - e_intervention_id = pytest.global_intervention_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/intervention/{intervention_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/metadata/intervention/{intervention_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/intervention/{a_intervention_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/intervention/{e_intervention_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- IPD SHARING METADATA ------------------- # -def test_put_ipdsharing_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/ipdsharing' endpoint is requested (PUT) - THEN check that the response is valid and updates the ipdsharing metadata - """ - _logged_in_client, _admin_client, 
_editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.put( - f"/study/{study_id}/metadata/ipdsharing", - json={ - "ipd_sharing": "Yes", - "ipd_sharing_description": "yes", - "ipd_sharing_info_type_list": ["Study Protocol", "Analytical Code"], - "ipd_sharing_time_frame": "uh", - "ipd_sharing_access_criteria": "Study Protocol", - "ipd_sharing_url": "1", - }, - ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["ipd_sharing"] == "Yes" - assert response_data["ipd_sharing_description"] == "yes" - assert response_data["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - assert response_data["ipd_sharing_time_frame"] == "uh" - assert response_data["ipd_sharing_access_criteria"] == "Study Protocol" - assert response_data["ipd_sharing_url"] == "1" - - admin_response = _admin_client.put( - f"/study/{study_id}/metadata/ipdsharing", - json={ - "ipd_sharing": "Yes", - "ipd_sharing_description": "admin-yes", - "ipd_sharing_info_type_list": ["Study Protocol", "Analytical Code"], - "ipd_sharing_time_frame": "uh", - "ipd_sharing_access_criteria": "Study Protocol", - "ipd_sharing_url": "1", - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["ipd_sharing"] == "Yes" - assert admin_response_data["ipd_sharing_description"] == "admin-yes" - assert admin_response_data["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - assert admin_response_data["ipd_sharing_time_frame"] == "uh" - assert admin_response_data["ipd_sharing_access_criteria"] == "Study Protocol" - assert admin_response_data["ipd_sharing_url"] == "1" - - editor_response = _editor_client.put( - f"/study/{study_id}/metadata/ipdsharing", - json={ - "ipd_sharing": "Yes", - "ipd_sharing_description": "editor-yes", - "ipd_sharing_info_type_list": ["Study 
Protocol", "Analytical Code"], - "ipd_sharing_time_frame": "uh", - "ipd_sharing_access_criteria": "Study Protocol", - "ipd_sharing_url": "1", - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["ipd_sharing"] == "Yes" - assert editor_response_data["ipd_sharing_description"] == "editor-yes" - assert editor_response_data["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - assert editor_response_data["ipd_sharing_time_frame"] == "uh" - assert editor_response_data["ipd_sharing_access_criteria"] == "Study Protocol" - assert editor_response_data["ipd_sharing_url"] == "1" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/metadata/ipdsharing", - json={ - "ipd_sharing": "Yes", - "ipd_sharing_description": "viewer-yes", - "ipd_sharing_info_type_list": ["Study Protocol", "Analytical Code"], - "ipd_sharing_time_frame": "uh", - "ipd_sharing_access_criteria": "Study Protocol", - "ipd_sharing_url": "1", - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_ipdsharing_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/ipdsharing' endpoint is requested (GET) - THEN check that the response is valid and retrieves the ipdsharing metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/ipdsharing") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/ipdsharing") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/ipdsharing") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/ipdsharing") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 
200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["ipd_sharing"] == "Yes" - assert response_data["ipd_sharing_description"] == "editor-yes" - assert response_data["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - assert response_data["ipd_sharing_time_frame"] == "uh" - assert response_data["ipd_sharing_access_criteria"] == "Study Protocol" - assert response_data["ipd_sharing_url"] == "1" - - assert admin_response_data["ipd_sharing"] == "Yes" - assert admin_response_data["ipd_sharing_description"] == "editor-yes" - assert admin_response_data["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - assert admin_response_data["ipd_sharing_time_frame"] == "uh" - assert admin_response_data["ipd_sharing_access_criteria"] == "Study Protocol" - assert admin_response_data["ipd_sharing_url"] == "1" - - assert editor_response_data["ipd_sharing"] == "Yes" - assert editor_response_data["ipd_sharing_description"] == "editor-yes" - assert editor_response_data["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - assert editor_response_data["ipd_sharing_time_frame"] == "uh" - assert editor_response_data["ipd_sharing_access_criteria"] == "Study Protocol" - assert editor_response_data["ipd_sharing_url"] == "1" - - assert viewer_response_data["ipd_sharing"] == "Yes" - assert viewer_response_data["ipd_sharing_description"] == "editor-yes" - assert viewer_response_data["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - assert viewer_response_data["ipd_sharing_time_frame"] == "uh" - assert viewer_response_data["ipd_sharing_access_criteria"] == "Study Protocol" - assert viewer_response_data["ipd_sharing_url"] == "1" - - -# ------------------- LINK METADATA ------------------- # 
-def test_post_link_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/link' endpoint is requested (POST) - THEN check that the response is valid and creates the link metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/link", - json=[{"url": "google.com", "title": "google link"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_link_id = response_data[0]["id"] - - assert response_data[0]["url"] == "google.com" - assert response_data[0]["title"] == "google link" - - admin_response = _admin_client.post( - f"/study/{study_id}/metadata/link", - json=[{"url": "admin-google.com", "title": "admin-google link"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_link_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["url"] == "admin-google.com" - assert admin_response_data[0]["title"] == "admin-google link" - - editor_response = _editor_client.post( - f"/study/{study_id}/metadata/link", - json=[{"url": "editor-google.com", "title": "editor-google link"}], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_link_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["url"] == "editor-google.com" - assert editor_response_data[0]["title"] == "editor-google link" + assert admin_response_data[2]["name"] == "editor-name test" + assert admin_response_data[2]["description"] == "desc" + assert admin_response_data[2]["arm_group_label_list"] == ["test", "one"] + 
assert admin_response_data[2]["other_name_list"] == ["uhh", "yes"] - viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/link", - json=[{"url": "viewer-google.com", "title": "viewer-google link"}], - ) + assert editor_response_data[0]["type"] == "Device" + assert editor_response_data[0]["name"] == "name test" + assert editor_response_data[0]["description"] == "desc" + assert editor_response_data[0]["arm_group_label_list"] == ["test", "one"] + assert editor_response_data[0]["other_name_list"] == ["uhh", "yes"] + assert editor_response_data[1]["type"] == "Device" + assert editor_response_data[1]["name"] == "admin-name test" + assert editor_response_data[1]["description"] == "desc" + assert editor_response_data[1]["arm_group_label_list"] == ["test", "one"] + assert editor_response_data[1]["other_name_list"] == ["uhh", "yes"] + assert editor_response_data[2]["type"] == "Device" + assert editor_response_data[2]["name"] == "editor-name test" + assert editor_response_data[2]["description"] == "desc" + assert editor_response_data[2]["arm_group_label_list"] == ["test", "one"] + assert editor_response_data[2]["other_name_list"] == ["uhh", "yes"] - assert viewer_response.status_code == 403 + assert viewer_response_data[0]["type"] == "Device" + assert viewer_response_data[0]["name"] == "name test" + assert viewer_response_data[0]["description"] == "desc" + assert viewer_response_data[0]["arm_group_label_list"] == ["test", "one"] + assert viewer_response_data[0]["other_name_list"] == ["uhh", "yes"] + assert viewer_response_data[1]["type"] == "Device" + assert viewer_response_data[1]["name"] == "admin-name test" + assert viewer_response_data[1]["description"] == "desc" + assert viewer_response_data[1]["arm_group_label_list"] == ["test", "one"] + assert viewer_response_data[1]["other_name_list"] == ["uhh", "yes"] + assert viewer_response_data[2]["type"] == "Device" + assert viewer_response_data[2]["name"] == "editor-name test" + assert 
viewer_response_data[2]["description"] == "desc" + assert viewer_response_data[2]["arm_group_label_list"] == ["test", "one"] + assert viewer_response_data[2]["other_name_list"] == ["uhh", "yes"] -def test_get_link_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/link' endpoint is requested (GET) - THEN check that the response is valid and retrieves the link metadata +def test_delete_intervention_metadata(clients): """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/link") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/link") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/link") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/link") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data[0]["url"] == "google.com" - assert response_data[0]["title"] == "google link" - assert response_data[1]["url"] == "admin-google.com" - assert response_data[1]["title"] == "admin-google link" - assert response_data[2]["url"] == "editor-google.com" - assert response_data[2]["title"] == "editor-google link" - - assert admin_response_data[0]["url"] == "google.com" - assert admin_response_data[0]["title"] == "google link" - assert admin_response_data[1]["url"] == "admin-google.com" - assert admin_response_data[1]["title"] == "admin-google link" - assert admin_response_data[2]["url"] == "editor-google.com" - assert admin_response_data[2]["title"] == 
"editor-google link" - - assert editor_response_data[0]["url"] == "google.com" - assert editor_response_data[0]["title"] == "google link" - assert editor_response_data[1]["url"] == "admin-google.com" - assert editor_response_data[1]["title"] == "admin-google link" - assert editor_response_data[2]["url"] == "editor-google.com" - assert editor_response_data[2]["title"] == "editor-google link" - - assert viewer_response_data[0]["url"] == "google.com" - assert viewer_response_data[0]["title"] == "google link" - assert viewer_response_data[1]["url"] == "admin-google.com" - assert viewer_response_data[1]["title"] == "admin-google link" - assert viewer_response_data[2]["url"] == "editor-google.com" - assert viewer_response_data[2]["title"] == "editor-google link" - - -def test_delete_link_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and link ID - WHEN the '/study/{study_id}/metadata/link/{link_id}' endpoint is requested (DELETE) - THEN check that the response is valid and deletes the link metadata + Given a Flask application configured for testing, study ID, dataset ID and intervention ID + WHEN the '/study/{study_id}/metadata/intervention' endpoint is requested (DELETE) + THEN check that the response is valid and deletes the intervention metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - link_id = pytest.global_link_id - admin_link_id = pytest.global_link_id_admin - editor_link_id = pytest.global_link_id_editor + intervention_id = pytest.global_intervention_id + a_intervention_id = pytest.global_intervention_id_admin + e_intervention_id = pytest.global_intervention_id_editor viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/link/{link_id}" + f"/study/{study_id}/metadata/intervention/{intervention_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/intervention/{intervention_id}" ) - 
response = _logged_in_client.delete(f"/study/{study_id}/metadata/link/{link_id}") admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/link/{admin_link_id}" + f"/study/{study_id}/metadata/intervention/{a_intervention_id}" ) editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/link/{editor_link_id}" + f"/study/{study_id}/metadata/intervention/{e_intervention_id}" ) assert viewer_response.status_code == 403 @@ -2594,7 +2700,21 @@ def test_post_overall_official_metadata(clients): response = _logged_in_client.post( f"/study/{study_id}/metadata/overall-official", - json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], + json=[ + { + "first_name": "test", + "last_name": "test", + "degree": "aff", + "identifier": "identifier", + "identifier_scheme": "scheme", + "identifier_scheme_uri": "uri", + "affiliation": "aff", + "affiliation_identifier": "identifier", + "affiliation_identifier_scheme": "scheme", + "affiliation_identifier_scheme_uri": "uri", + "role": "chair", + } + ], ) # Add a one second delay to prevent duplicate timestamps sleep(1) @@ -2603,14 +2723,34 @@ def test_post_overall_official_metadata(clients): response_data = json.loads(response.data) pytest.global_overall_official_id = response_data[0]["id"] - assert response_data[0]["name"] == "test" + assert response_data[0]["first_name"] == "test" + assert response_data[0]["last_name"] == "test" assert response_data[0]["affiliation"] == "aff" - assert response_data[0]["role"] == "Study Chair" + assert response_data[0]["degree"] == "aff" + assert response_data[0]["identifier"] == "identifier" + assert response_data[0]["identifier_scheme"] == "scheme" + assert response_data[0]["identifier_scheme_uri"] == "uri" + assert response_data[0]["affiliation_identifier"] == "identifier" + assert response_data[0]["affiliation_identifier_scheme"] == "scheme" + assert response_data[0]["affiliation_identifier_scheme_uri"] == "uri" + assert response_data[0]["role"] == "chair" 
admin_response = _admin_client.post( f"/study/{study_id}/metadata/overall-official", json=[ - {"name": "admin-test", "affiliation": "admin-aff", "role": "Study Chair"} + { + "first_name": "admin test", + "last_name": "test", + "degree": "aff", + "identifier": "identifier", + "identifier_scheme": "scheme", + "identifier_scheme_uri": "uri", + "affiliation": "aff", + "affiliation_identifier": "identifier", + "affiliation_identifier_scheme": "scheme", + "affiliation_identifier_scheme_uri": "uri", + "role": "chair", + } ], ) # Add a one second delay to prevent duplicate timestamps @@ -2620,14 +2760,35 @@ def test_post_overall_official_metadata(clients): admin_response_data = json.loads(admin_response.data) pytest.global_overall_official_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0]["name"] == "admin-test" - assert admin_response_data[0]["affiliation"] == "admin-aff" - assert admin_response_data[0]["role"] == "Study Chair" + assert admin_response_data[0]["first_name"] == "admin test" + assert admin_response_data[0]["last_name"] == "test" + assert admin_response_data[0]["affiliation"] == "aff" + assert admin_response_data[0]["degree"] == "aff" + assert admin_response_data[0]["identifier"] == "identifier" + assert admin_response_data[0]["identifier_scheme"] == "scheme" + assert admin_response_data[0]["identifier_scheme_uri"] == "uri" + assert admin_response_data[0]["affiliation"] == "aff" + assert admin_response_data[0]["affiliation_identifier"] == "identifier" + assert admin_response_data[0]["affiliation_identifier_scheme"] == "scheme" + assert admin_response_data[0]["affiliation_identifier_scheme_uri"] == "uri" + assert admin_response_data[0]["role"] == "chair" editor_response = _editor_client.post( f"/study/{study_id}/metadata/overall-official", json=[ - {"name": "editor-test", "affiliation": "editor-aff", "role": "Study Chair"} + { + "first_name": "editor test", + "last_name": "test", + "degree": "aff", + "identifier": "identifier", + 
"identifier_scheme": "scheme", + "identifier_scheme_uri": "uri", + "affiliation": "aff", + "affiliation_identifier": "identifier", + "affiliation_identifier_scheme": "scheme", + "affiliation_identifier_scheme_uri": "uri", + "role": "chair", + } ], ) @@ -2635,14 +2796,35 @@ def test_post_overall_official_metadata(clients): editor_response_data = json.loads(editor_response.data) pytest.global_overall_official_id_editor = editor_response_data[0]["id"] - assert editor_response_data[0]["name"] == "editor-test" - assert editor_response_data[0]["affiliation"] == "editor-aff" - assert editor_response_data[0]["role"] == "Study Chair" + assert editor_response_data[0]["first_name"] == "editor test" + assert editor_response_data[0]["last_name"] == "test" + assert editor_response_data[0]["affiliation"] == "aff" + assert editor_response_data[0]["degree"] == "aff" + assert editor_response_data[0]["identifier"] == "identifier" + assert editor_response_data[0]["identifier_scheme"] == "scheme" + assert editor_response_data[0]["identifier_scheme_uri"] == "uri" + assert editor_response_data[0]["affiliation"] == "aff" + assert editor_response_data[0]["affiliation_identifier"] == "identifier" + assert editor_response_data[0]["affiliation_identifier_scheme"] == "scheme" + assert editor_response_data[0]["affiliation_identifier_scheme_uri"] == "uri" + assert editor_response_data[0]["role"] == "chair" viewer_response = _viewer_client.post( f"/study/{study_id}/metadata/overall-official", json=[ - {"name": "viewer-test", "affiliation": "viewer-aff", "role": "Study Chair"} + { + "first_name": "editor test", + "last_name": "test", + "degree": "aff", + "identifier": "identifier", + "identifier_scheme": "scheme", + "identifier_scheme_uri": "uri", + "affiliation": "aff", + "affiliation_identifier": "identifier", + "affiliation_identifier_scheme": "scheme", + "affiliation_identifier_scheme_uri": "uri", + "role": "chair", + } ], ) @@ -2673,45 +2855,161 @@ def 
test_get_overall_official_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["name"] == "test" + assert response_data[0]["first_name"] == "test" + assert response_data[0]["last_name"] == "test" assert response_data[0]["affiliation"] == "aff" - assert response_data[0]["role"] == "Study Chair" - assert response_data[1]["name"] == "admin-test" - assert response_data[1]["affiliation"] == "admin-aff" - assert response_data[1]["role"] == "Study Chair" - assert response_data[2]["name"] == "editor-test" - assert response_data[2]["affiliation"] == "editor-aff" - assert response_data[2]["role"] == "Study Chair" - - assert admin_response_data[0]["name"] == "test" + assert response_data[0]["degree"] == "aff" + assert response_data[0]["identifier"] == "identifier" + assert response_data[0]["identifier_scheme"] == "scheme" + assert response_data[0]["identifier_scheme_uri"] == "uri" + assert response_data[0]["affiliation"] == "aff" + assert response_data[0]["affiliation_identifier"] == "identifier" + assert response_data[0]["affiliation_identifier_scheme"] == "scheme" + assert response_data[0]["affiliation_identifier_scheme_uri"] == "uri" + assert response_data[0]["role"] == "chair" + + assert admin_response_data[0]["first_name"] == "test" + assert admin_response_data[0]["last_name"] == "test" assert admin_response_data[0]["affiliation"] == "aff" - assert admin_response_data[0]["role"] == "Study Chair" - assert admin_response_data[1]["name"] == "admin-test" - assert admin_response_data[1]["affiliation"] == "admin-aff" - assert admin_response_data[1]["role"] == "Study Chair" - assert admin_response_data[2]["name"] == "editor-test" - assert admin_response_data[2]["affiliation"] == "editor-aff" - assert admin_response_data[2]["role"] == "Study Chair" - - assert editor_response_data[0]["name"] == "test" + assert admin_response_data[0]["degree"] == "aff" + assert 
admin_response_data[0]["identifier"] == "identifier" + assert admin_response_data[0]["identifier_scheme"] == "scheme" + assert admin_response_data[0]["identifier_scheme_uri"] == "uri" + assert admin_response_data[0]["affiliation"] == "aff" + assert admin_response_data[0]["affiliation_identifier"] == "identifier" + assert admin_response_data[0]["affiliation_identifier_scheme"] == "scheme" + assert admin_response_data[0]["affiliation_identifier_scheme_uri"] == "uri" + assert admin_response_data[0]["role"] == "chair" + + assert editor_response_data[0]["first_name"] == "test" + assert editor_response_data[0]["last_name"] == "test" assert editor_response_data[0]["affiliation"] == "aff" - assert editor_response_data[0]["role"] == "Study Chair" - assert editor_response_data[1]["name"] == "admin-test" - assert editor_response_data[1]["affiliation"] == "admin-aff" - assert editor_response_data[1]["role"] == "Study Chair" - assert editor_response_data[2]["name"] == "editor-test" - assert editor_response_data[2]["affiliation"] == "editor-aff" - assert editor_response_data[2]["role"] == "Study Chair" - - assert viewer_response_data[0]["name"] == "test" + assert editor_response_data[0]["degree"] == "aff" + assert editor_response_data[0]["identifier"] == "identifier" + assert editor_response_data[0]["identifier_scheme"] == "scheme" + assert editor_response_data[0]["identifier_scheme_uri"] == "uri" + assert editor_response_data[0]["affiliation"] == "aff" + assert editor_response_data[0]["affiliation_identifier"] == "identifier" + assert editor_response_data[0]["affiliation_identifier_scheme"] == "scheme" + assert editor_response_data[0]["affiliation_identifier_scheme_uri"] == "uri" + assert editor_response_data[0]["role"] == "chair" + + assert viewer_response_data[0]["first_name"] == "test" + assert viewer_response_data[0]["last_name"] == "test" assert viewer_response_data[0]["affiliation"] == "aff" - assert viewer_response_data[0]["role"] == "Study Chair" - assert 
viewer_response_data[1]["name"] == "admin-test" - assert viewer_response_data[1]["affiliation"] == "admin-aff" - assert viewer_response_data[1]["role"] == "Study Chair" - assert viewer_response_data[2]["name"] == "editor-test" - assert viewer_response_data[2]["affiliation"] == "editor-aff" - assert viewer_response_data[2]["role"] == "Study Chair" + assert viewer_response_data[0]["degree"] == "aff" + assert viewer_response_data[0]["identifier"] == "identifier" + assert viewer_response_data[0]["identifier_scheme"] == "scheme" + assert viewer_response_data[0]["identifier_scheme_uri"] == "uri" + assert viewer_response_data[0]["affiliation"] == "aff" + assert viewer_response_data[0]["affiliation_identifier"] == "identifier" + assert viewer_response_data[0]["affiliation_identifier_scheme"] == "scheme" + assert viewer_response_data[0]["affiliation_identifier_scheme_uri"] == "uri" + assert viewer_response_data[0]["role"] == "chair" + + assert response_data[1]["first_name"] == "admin test" + assert response_data[1]["last_name"] == "test" + assert response_data[1]["affiliation"] == "aff" + assert response_data[1]["degree"] == "aff" + assert response_data[1]["identifier"] == "identifier" + assert response_data[1]["identifier_scheme"] == "scheme" + assert response_data[1]["identifier_scheme_uri"] == "uri" + assert response_data[1]["affiliation"] == "aff" + assert response_data[1]["affiliation_identifier"] == "identifier" + assert response_data[1]["affiliation_identifier_scheme"] == "scheme" + assert response_data[1]["affiliation_identifier_scheme_uri"] == "uri" + assert response_data[1]["role"] == "chair" + + assert admin_response_data[1]["first_name"] == "admin test" + assert admin_response_data[1]["last_name"] == "test" + assert admin_response_data[1]["affiliation"] == "aff" + assert admin_response_data[1]["degree"] == "aff" + assert admin_response_data[1]["identifier"] == "identifier" + assert admin_response_data[1]["identifier_scheme"] == "scheme" + assert 
admin_response_data[1]["identifier_scheme_uri"] == "uri" + assert admin_response_data[1]["affiliation"] == "aff" + assert admin_response_data[1]["affiliation_identifier"] == "identifier" + assert admin_response_data[1]["affiliation_identifier_scheme"] == "scheme" + assert admin_response_data[1]["affiliation_identifier_scheme_uri"] == "uri" + assert admin_response_data[1]["role"] == "chair" + + assert editor_response_data[1]["first_name"] == "admin test" + assert editor_response_data[1]["last_name"] == "test" + assert editor_response_data[1]["affiliation"] == "aff" + assert editor_response_data[1]["degree"] == "aff" + assert editor_response_data[1]["identifier"] == "identifier" + assert editor_response_data[1]["identifier_scheme"] == "scheme" + assert editor_response_data[1]["identifier_scheme_uri"] == "uri" + assert editor_response_data[1]["affiliation"] == "aff" + assert editor_response_data[1]["affiliation_identifier"] == "identifier" + assert editor_response_data[1]["affiliation_identifier_scheme"] == "scheme" + assert editor_response_data[1]["affiliation_identifier_scheme_uri"] == "uri" + assert editor_response_data[1]["role"] == "chair" + + assert viewer_response_data[1]["first_name"] == "admin test" + assert viewer_response_data[1]["last_name"] == "test" + assert viewer_response_data[1]["affiliation"] == "aff" + assert viewer_response_data[1]["degree"] == "aff" + assert viewer_response_data[1]["identifier"] == "identifier" + assert viewer_response_data[1]["identifier_scheme"] == "scheme" + assert viewer_response_data[1]["identifier_scheme_uri"] == "uri" + assert viewer_response_data[1]["affiliation"] == "aff" + assert viewer_response_data[1]["affiliation_identifier"] == "identifier" + assert viewer_response_data[1]["affiliation_identifier_scheme"] == "scheme" + assert viewer_response_data[1]["affiliation_identifier_scheme_uri"] == "uri" + assert viewer_response_data[1]["role"] == "chair" + + assert response_data[2]["first_name"] == "editor test" + assert 
response_data[2]["last_name"] == "test" + assert response_data[2]["affiliation"] == "aff" + assert response_data[2]["degree"] == "aff" + assert response_data[2]["identifier"] == "identifier" + assert response_data[2]["identifier_scheme"] == "scheme" + assert response_data[2]["identifier_scheme_uri"] == "uri" + assert response_data[2]["affiliation"] == "aff" + assert response_data[2]["affiliation_identifier"] == "identifier" + assert response_data[2]["affiliation_identifier_scheme"] == "scheme" + assert response_data[2]["affiliation_identifier_scheme_uri"] == "uri" + assert response_data[2]["role"] == "chair" + + assert admin_response_data[2]["first_name"] == "editor test" + assert admin_response_data[2]["last_name"] == "test" + assert admin_response_data[2]["affiliation"] == "aff" + assert admin_response_data[2]["degree"] == "aff" + assert admin_response_data[2]["identifier"] == "identifier" + assert admin_response_data[2]["identifier_scheme"] == "scheme" + assert admin_response_data[2]["identifier_scheme_uri"] == "uri" + assert admin_response_data[2]["affiliation"] == "aff" + assert admin_response_data[2]["affiliation_identifier"] == "identifier" + assert admin_response_data[2]["affiliation_identifier_scheme"] == "scheme" + assert admin_response_data[2]["affiliation_identifier_scheme_uri"] == "uri" + assert admin_response_data[2]["role"] == "chair" + + assert editor_response_data[2]["first_name"] == "editor test" + assert editor_response_data[2]["last_name"] == "test" + assert editor_response_data[2]["affiliation"] == "aff" + assert editor_response_data[2]["degree"] == "aff" + assert editor_response_data[2]["identifier"] == "identifier" + assert editor_response_data[2]["identifier_scheme"] == "scheme" + assert editor_response_data[2]["identifier_scheme_uri"] == "uri" + assert editor_response_data[2]["affiliation"] == "aff" + assert editor_response_data[2]["affiliation_identifier"] == "identifier" + assert editor_response_data[2]["affiliation_identifier_scheme"] == 
"scheme" + assert editor_response_data[2]["affiliation_identifier_scheme_uri"] == "uri" + assert editor_response_data[2]["role"] == "chair" + + assert viewer_response_data[2]["first_name"] == "editor test" + assert viewer_response_data[2]["last_name"] == "test" + assert viewer_response_data[2]["affiliation"] == "aff" + assert viewer_response_data[2]["degree"] == "aff" + assert viewer_response_data[2]["identifier"] == "identifier" + assert viewer_response_data[2]["identifier_scheme"] == "scheme" + assert viewer_response_data[2]["identifier_scheme_uri"] == "uri" + assert viewer_response_data[2]["affiliation"] == "aff" + assert viewer_response_data[2]["affiliation_identifier"] == "identifier" + assert viewer_response_data[2]["affiliation_identifier_scheme"] == "scheme" + assert viewer_response_data[2]["affiliation_identifier_scheme_uri"] == "uri" + assert viewer_response_data[2]["role"] == "chair" def test_delete_overall_official_metadata(clients): @@ -2758,33 +3056,65 @@ def test_put_oversight_metadata(clients): study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( - f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": True} + f"/study/{study_id}/metadata/oversight", + json={ + "fda_regulated_drug": "drug", + "fda_regulated_device": "device", + "has_dmc": "yes", + "human_subject_review_status": "yes", + }, ) assert response.status_code == 200 response_data = json.loads(response.data) - - assert response_data is True + assert response_data["fda_regulated_drug"] == "drug" + assert response_data["fda_regulated_device"] == "device" + assert response_data["has_dmc"] == "yes" + assert response_data["human_subject_review_status"] == "yes" admin_response = _admin_client.put( - f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": False} + f"/study/{study_id}/metadata/oversight", + json={ + "fda_regulated_drug": "drug", + "fda_regulated_device": "device", + "has_dmc": "yes", + "human_subject_review_status": 
"yes", + }, ) assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - assert admin_response_data is False + assert admin_response_data["fda_regulated_drug"] == "drug" + assert admin_response_data["fda_regulated_device"] == "device" + assert admin_response_data["has_dmc"] == "yes" + assert admin_response_data["human_subject_review_status"] == "yes" editor_response = _editor_client.put( - f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": True} + f"/study/{study_id}/metadata/oversight", + json={ + "fda_regulated_drug": "drug", + "fda_regulated_device": "device", + "has_dmc": "yes", + "human_subject_review_status": "yes", + }, ) editor_response_data = json.loads(editor_response.data) - assert editor_response_data is True + assert editor_response_data["fda_regulated_drug"] == "drug" + assert editor_response_data["fda_regulated_device"] == "device" + assert editor_response_data["has_dmc"] == "yes" + assert editor_response_data["human_subject_review_status"] == "yes" viewer_response = _viewer_client.put( - f"/study/{study_id}/metadata/oversight", json={"oversight_has_dmc": False} + f"/study/{study_id}/metadata/oversight", + json={ + "fda_regulated_drug": "drug", + "fda_regulated_device": "device", + "has_dmc": "yes", + "human_subject_review_status": "yes", + }, ) assert viewer_response.status_code == 403 @@ -2814,193 +3144,25 @@ def test_get_oversight_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data["oversight"] is True - assert admin_response_data["oversight"] is True - assert editor_response_data["oversight"] is True - assert viewer_response_data["oversight"] is True - - -# ------------------- REFERENCE METADATA ------------------- # -def test_post_reference_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/reference' endpoint is 
requested (POST) - THEN check that the response is valid and creates the reference metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/reference", - json=[ - { - "identifier": "reference identifier", - "type": "Yes", - "citation": "reference citation", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_reference_id = response_data[0]["id"] - - assert response_data[0]["identifier"] == "reference identifier" - assert response_data[0]["type"] == "Yes" - assert response_data[0]["citation"] == "reference citation" - - admin_response = _admin_client.post( - f"/study/{study_id}/metadata/reference", - json=[ - { - "identifier": "admin-reference identifier", - "type": "Yes", - "citation": "admin-reference citation", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_reference_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == "admin-reference identifier" - assert admin_response_data[0]["type"] == "Yes" - assert admin_response_data[0]["citation"] == "admin-reference citation" - - editor_response = _editor_client.post( - f"/study/{study_id}/metadata/reference", - json=[ - { - "identifier": "editor-reference identifier", - "type": "Yes", - "citation": "editor-reference citation", - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_reference_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["identifier"] == "editor-reference identifier" - assert editor_response_data[0]["type"] == "Yes" - assert 
editor_response_data[0]["citation"] == "editor-reference citation" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/reference", - json=[ - { - "identifier": "viewer-reference identifier", - "type": "Yes", - "citation": "editor-reference citation", - } - ], - ) - - assert viewer_response.status_code == 403 - - -def test_get_reference_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/reference' endpoint is requested (GET) - THEN check that the response is valid and retrieves the reference metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/reference") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/reference") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/reference") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/reference") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) + assert response_data["fda_regulated_drug"] == "drug" + assert response_data["fda_regulated_device"] == "device" + assert response_data["has_dmc"] == "yes" + assert response_data["human_subject_review_status"] == "yes" - assert response_data[0]["identifier"] == "reference identifier" - assert response_data[0]["type"] == "Yes" - assert response_data[0]["citation"] == "reference citation" - assert response_data[1]["identifier"] == "admin-reference identifier" - assert response_data[1]["type"] == "Yes" - assert response_data[1]["citation"] == 
"admin-reference citation" - assert response_data[2]["identifier"] == "editor-reference identifier" - assert response_data[2]["type"] == "Yes" - assert response_data[2]["citation"] == "editor-reference citation" - - assert admin_response_data[0]["identifier"] == "reference identifier" - assert admin_response_data[0]["type"] == "Yes" - assert admin_response_data[0]["citation"] == "reference citation" - assert admin_response_data[1]["identifier"] == "admin-reference identifier" - assert admin_response_data[1]["type"] == "Yes" - assert admin_response_data[1]["citation"] == "admin-reference citation" - assert admin_response_data[2]["identifier"] == "editor-reference identifier" - assert admin_response_data[2]["type"] == "Yes" - assert admin_response_data[2]["citation"] == "editor-reference citation" - - assert editor_response_data[0]["identifier"] == "reference identifier" - assert editor_response_data[0]["type"] == "Yes" - assert editor_response_data[0]["citation"] == "reference citation" - assert editor_response_data[1]["identifier"] == "admin-reference identifier" - assert editor_response_data[1]["type"] == "Yes" - assert editor_response_data[1]["citation"] == "admin-reference citation" - assert editor_response_data[2]["identifier"] == "editor-reference identifier" - assert editor_response_data[2]["type"] == "Yes" - assert editor_response_data[2]["citation"] == "editor-reference citation" - - assert viewer_response_data[0]["identifier"] == "reference identifier" - assert viewer_response_data[0]["type"] == "Yes" - assert viewer_response_data[0]["citation"] == "reference citation" - assert viewer_response_data[1]["identifier"] == "admin-reference identifier" - assert viewer_response_data[1]["type"] == "Yes" - assert viewer_response_data[1]["citation"] == "admin-reference citation" - assert viewer_response_data[2]["identifier"] == "editor-reference identifier" - assert viewer_response_data[2]["type"] == "Yes" - assert viewer_response_data[2]["citation"] == 
"editor-reference citation" - - -def test_delete_reference_metadata(clients): - """ - Given a Flask application configured for testing and - a study ID and reference ID - WHEN the '/study/{study_id}/metadata/reference/{reference_id}' - endpoint is requested (DELETE) - THEN check that the response is valid and deletes the reference metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - reference_id = pytest.global_reference_id - admin_reference_id = pytest.global_reference_id_admin - editor_reference_id = pytest.global_reference_id_editor + assert admin_response_data["fda_regulated_drug"] == "drug" + assert admin_response_data["fda_regulated_device"] == "device" + assert admin_response_data["has_dmc"] == "yes" + assert admin_response_data["human_subject_review_status"] == "yes" - viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/reference/{reference_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/metadata/reference/{reference_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/reference/{admin_reference_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/reference/{editor_reference_id}" - ) + assert editor_response_data["fda_regulated_drug"] == "drug" + assert editor_response_data["fda_regulated_device"] == "device" + assert editor_response_data["has_dmc"] == "yes" + assert editor_response_data["human_subject_review_status"] == "yes" - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + assert viewer_response_data["fda_regulated_drug"] == "drug" + assert viewer_response_data["fda_regulated_device"] == "device" + assert viewer_response_data["has_dmc"] == "yes" + assert viewer_response_data["human_subject_review_status"] == "yes" # ------------------- 
SPONSORS METADATA ------------------- # @@ -3014,36 +3176,82 @@ def test_put_sponsors_metadata(clients): study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.put( - f"/study/{study_id}/metadata/sponsors", + f"/study/{study_id}/metadata/sponsor", json={ + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", "responsible_party_type": "Sponsor", - "responsible_party_investigator_name": "party name", - "responsible_party_investigator_title": "party title", - "responsible_party_investigator_affiliation": "party affiliation", - "lead_sponsor_name": "sponsor name", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", }, ) - assert response.status_code == 200 response_data = json.loads(response.data) assert response_data["responsible_party_type"] == "Sponsor" - assert response_data["responsible_party_investigator_name"] == "party name" - assert response_data["responsible_party_investigator_title"] == "party title" + assert response_data["responsible_party_investigator_first_name"] == "name" + assert response_data["responsible_party_investigator_last_name"] == "surname" + assert ( + response_data["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + response_data["responsible_party_investigator_identifier_value"] 
== "identifier" + ) + assert response_data["responsible_party_investigator_identifier_scheme"] == "scheme" + assert ( + response_data["responsible_party_investigator_identifier_scheme_uri"] == "uri" + ) + assert ( + response_data["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) assert ( - response_data["responsible_party_investigator_affiliation"] - == "party affiliation" # noqa: W503 + response_data["responsible_party_investigator_affiliation_identifier_value"] + == "identifier" ) - assert response_data["lead_sponsor_name"] == "sponsor name" + assert ( + response_data["responsible_party_investigator_affiliation_identifier_scheme"] + == "scheme" + ) + assert ( + response_data[ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert response_data["lead_sponsor_name"] == "name" + assert response_data["lead_sponsor_identifier"] == "identifier" + assert response_data["lead_sponsor_identifier_scheme"] == "scheme" + assert response_data["lead_sponsor_identifier_scheme_uri"] == "uri" admin_response = _admin_client.put( - f"/study/{study_id}/metadata/sponsors", + f"/study/{study_id}/metadata/sponsor", json={ + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", "responsible_party_type": "Sponsor", - "responsible_party_investigator_name": "admin sponsor name", - "responsible_party_investigator_title": "admin sponsor title", - "responsible_party_investigator_affiliation": "admin sponsor affiliation", - "lead_sponsor_name": "admin sponsor name", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + 
"responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", }, ) @@ -3051,34 +3259,68 @@ def test_put_sponsors_metadata(clients): admin_response_data = json.loads(admin_response.data) assert admin_response_data["responsible_party_type"] == "Sponsor" + assert admin_response_data["responsible_party_investigator_first_name"] == "name" + assert admin_response_data["responsible_party_investigator_last_name"] == "surname" + assert ( + admin_response_data["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + admin_response_data["responsible_party_investigator_identifier_value"] + == "identifier" + ) assert ( - admin_response_data["responsible_party_investigator_name"] - == "admin sponsor name" + admin_response_data["responsible_party_investigator_identifier_scheme"] + == "scheme" ) assert ( - admin_response_data["responsible_party_investigator_title"] - == "admin sponsor title" + admin_response_data["responsible_party_investigator_identifier_scheme_uri"] + == "uri" ) - # pylint: disable=line-too-long assert ( - admin_response_data["responsible_party_investigator_affiliation"] - == "admin sponsor affiliation" + admin_response_data["responsible_party_investigator_affiliation_name"] + == "affiliation" ) - # pylint: disable=line-too-long assert ( - admin_response_data["responsible_party_investigator_affiliation"] - == "admin sponsor affiliation" + admin_response_data[ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" ) - assert admin_response_data["lead_sponsor_name"] == "admin sponsor name" + assert ( + admin_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data[ + 
"responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert admin_response_data["lead_sponsor_name"] == "name" + assert admin_response_data["lead_sponsor_identifier"] == "identifier" + assert admin_response_data["lead_sponsor_identifier_scheme"] == "scheme" + assert admin_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" editor_response = _editor_client.put( - f"/study/{study_id}/metadata/sponsors", + f"/study/{study_id}/metadata/sponsor", json={ + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", "responsible_party_type": "Sponsor", - "responsible_party_investigator_name": "editor sponsor name", - "responsible_party_investigator_title": "editor sponsor title", - "responsible_party_investigator_affiliation": "editor sponsor affiliation", - "lead_sponsor_name": "editor sponsor name", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", }, ) @@ -3086,28 +3328,68 @@ def test_put_sponsors_metadata(clients): editor_response_data = json.loads(editor_response.data) assert editor_response_data["responsible_party_type"] == "Sponsor" + assert editor_response_data["responsible_party_investigator_first_name"] == "name" + assert editor_response_data["responsible_party_investigator_last_name"] == "surname" assert ( - 
editor_response_data["responsible_party_investigator_name"] - == "editor sponsor name" + editor_response_data["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + editor_response_data["responsible_party_investigator_identifier_value"] + == "identifier" ) assert ( - editor_response_data["responsible_party_investigator_title"] - == "editor sponsor title" + editor_response_data["responsible_party_investigator_identifier_scheme"] + == "scheme" ) assert ( - editor_response_data["responsible_party_investigator_affiliation"] - == "editor sponsor affiliation" - ) # noqa: E501 - assert editor_response_data["lead_sponsor_name"] == "editor sponsor name" + editor_response_data["responsible_party_investigator_identifier_scheme_uri"] + == "uri" + ) + assert ( + editor_response_data["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) + assert ( + editor_response_data[ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert editor_response_data["lead_sponsor_name"] == "name" + assert editor_response_data["lead_sponsor_identifier"] == "identifier" + assert editor_response_data["lead_sponsor_identifier_scheme"] == "scheme" + assert editor_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" viewer_response = _viewer_client.put( - f"/study/{study_id}/metadata/sponsors", + f"/study/{study_id}/metadata/sponsor", json={ "responsible_party_type": "Sponsor", - "responsible_party_investigator_name": "viewer sponsor name", - "responsible_party_investigator_title": "viewer sponsor title", - "responsible_party_investigator_affiliation": "viewer sponsor affiliation", - "lead_sponsor_name": "viewer sponsor name", + 
"responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", }, ) @@ -3123,10 +3405,10 @@ def test_get_sponsors_metadata(clients): _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/sponsors") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/sponsors") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/sponsors") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/sponsors") + response = _logged_in_client.get(f"/study/{study_id}/metadata/sponsor") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/sponsor") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/sponsor") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/sponsor") assert response.status_code == 200 assert admin_response.status_code == 200 @@ -3139,60 +3421,176 @@ def test_get_sponsors_metadata(clients): viewer_response_data = json.loads(viewer_response.data) assert response_data["responsible_party_type"] == "Sponsor" - assert response_data["responsible_party_investigator_name"] == "editor sponsor name" + assert 
response_data["responsible_party_investigator_first_name"] == "name" + assert response_data["responsible_party_investigator_last_name"] == "surname" + assert ( + response_data["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + + assert ( + response_data["responsible_party_investigator_identifier_value"] == "identifier" + ) + assert response_data["responsible_party_investigator_identifier_scheme"] == "scheme" + assert ( + response_data["responsible_party_investigator_identifier_scheme_uri"] == "uri" + ) + assert ( + response_data["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) + assert ( + response_data["responsible_party_investigator_affiliation_identifier_value"] + == "identifier" + ) assert ( - response_data["responsible_party_investigator_title"] == "editor sponsor title" + response_data["responsible_party_investigator_affiliation_identifier_scheme"] + == "scheme" ) assert ( - response_data["responsible_party_investigator_affiliation"] - == "editor sponsor affiliation" + response_data[ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" ) - assert response_data["lead_sponsor_name"] == "editor sponsor name" + assert response_data["lead_sponsor_name"] == "name" + assert response_data["lead_sponsor_identifier"] == "identifier" + assert response_data["lead_sponsor_identifier_scheme"] == "scheme" + assert response_data["lead_sponsor_identifier_scheme_uri"] == "uri" assert admin_response_data["responsible_party_type"] == "Sponsor" + assert admin_response_data["responsible_party_investigator_first_name"] == "name" + assert admin_response_data["responsible_party_investigator_last_name"] == "surname" + assert ( + admin_response_data["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + admin_response_data["responsible_party_investigator_identifier_value"] + == "identifier" + ) + assert ( + admin_response_data["responsible_party_investigator_identifier_scheme"] + == 
"scheme" + ) + assert ( + admin_response_data["responsible_party_investigator_identifier_scheme_uri"] + == "uri" + ) + assert ( + admin_response_data["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) assert ( - admin_response_data["responsible_party_investigator_name"] - == "editor sponsor name" + admin_response_data[ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" ) assert ( - admin_response_data["responsible_party_investigator_title"] - == "editor sponsor title" + admin_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" ) assert ( - admin_response_data["responsible_party_investigator_affiliation"] - == "editor sponsor affiliation" + admin_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" ) - assert admin_response_data["lead_sponsor_name"] == "editor sponsor name" + assert admin_response_data["lead_sponsor_name"] == "name" + assert admin_response_data["lead_sponsor_identifier"] == "identifier" + assert admin_response_data["lead_sponsor_identifier_scheme"] == "scheme" + assert admin_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" assert editor_response_data["responsible_party_type"] == "Sponsor" + assert editor_response_data["responsible_party_investigator_first_name"] == "name" + assert editor_response_data["responsible_party_investigator_last_name"] == "surname" + assert ( + editor_response_data["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + editor_response_data["responsible_party_investigator_identifier_value"] + == "identifier" + ) + assert ( + editor_response_data["responsible_party_investigator_identifier_scheme"] + == "scheme" + ) + assert ( + editor_response_data["responsible_party_investigator_identifier_scheme_uri"] + == "uri" + ) + assert ( + editor_response_data["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) assert ( - 
editor_response_data["responsible_party_investigator_name"] - == "editor sponsor name" + editor_response_data[ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" ) assert ( - editor_response_data["responsible_party_investigator_title"] - == "editor sponsor title" + editor_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" ) assert ( - editor_response_data["responsible_party_investigator_affiliation"] - == "editor sponsor affiliation" + editor_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" ) - assert editor_response_data["lead_sponsor_name"] == "editor sponsor name" + assert editor_response_data["lead_sponsor_name"] == "name" + assert editor_response_data["lead_sponsor_identifier"] == "identifier" + assert editor_response_data["lead_sponsor_identifier_scheme"] == "scheme" + assert editor_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" assert viewer_response_data["responsible_party_type"] == "Sponsor" + assert viewer_response_data["responsible_party_investigator_first_name"] == "name" + assert viewer_response_data["responsible_party_investigator_last_name"] == "surname" + assert ( + viewer_response_data["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + viewer_response_data["responsible_party_investigator_identifier_value"] + == "identifier" + ) + assert ( + viewer_response_data["responsible_party_investigator_identifier_scheme"] + == "scheme" + ) + assert ( + viewer_response_data["responsible_party_investigator_identifier_scheme_uri"] + == "uri" + ) assert ( - viewer_response_data["responsible_party_investigator_name"] - == "editor sponsor name" + viewer_response_data["responsible_party_investigator_affiliation_name"] + == "affiliation" ) assert ( - viewer_response_data["responsible_party_investigator_title"] - == "editor sponsor title" + viewer_response_data[ + 
"responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" ) assert ( - viewer_response_data["responsible_party_investigator_affiliation"] - == "editor sponsor affiliation" + viewer_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" ) - assert viewer_response_data["lead_sponsor_name"] == "editor sponsor name" + assert ( + viewer_response_data[ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert viewer_response_data["lead_sponsor_name"] == "name" + assert viewer_response_data["lead_sponsor_identifier"] == "identifier" + assert viewer_response_data["lead_sponsor_identifier_scheme"] == "scheme" + assert viewer_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" # ------------------- STATUS METADATA ------------------- # diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index d15c0f36..d0433a6b 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -246,27 +246,24 @@ def test_get_version_study_metadata(clients): } ], ) - avail_ipd_response = _logged_in_client.post( - f"/study/{study_id}/metadata/available-ipd", - json=[ - { - "identifier": "identifier1", - "type": "Clinical Study Report", - "url": "google.com", - "comment": "comment1", - } - ], - ) + cc_response = _logged_in_client.post( f"/study/{study_id}/metadata/central-contact", json=[ { - "name": "central-contact", "affiliation": "affiliation", - "role": "role", "phone": "808", "phone_ext": "909", "email_address": "sample@gmail.com", + "first_name": "central-contact", + "last_name": "central-contact", + "degree": "degree", + "identifier": "central-contact", + "identifier_scheme": "id", + "identifier_scheme_uri": "uri", + "affiliation_identifier": "affiliation identifier", + "affiliation_identifier_scheme": "affiliation identifier scheme", + "affiliation_identifier_scheme_uri": 
"affiliation identifier scheme uri", } ], ) @@ -314,34 +311,70 @@ def test_get_version_study_metadata(clients): } ], ) - link_response = _logged_in_client.post( - f"/study/{study_id}/metadata/link", - json=[{"url": "google.com", "title": "google link"}], + collaborators_response = _logged_in_client.post( + f"/study/{study_id}/metadata/collaborators", + json=[ + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + } + ], + ) + conditions_response = _logged_in_client.post( + f"/study/{study_id}/metadata/conditions", + json=[ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + ) + keywords_response = _logged_in_client.post( + f"/study/{study_id}/metadata/keywords", + json=[ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], ) + of_response = _logged_in_client.post( f"/study/{study_id}/metadata/overall-official", - json=[{"name": "test", "affiliation": "aff", "role": "Study Chair"}], - ) - reference_response = _logged_in_client.post( - f"/study/{study_id}/metadata/reference", json=[ { - "identifier": "reference identifier", - "type": "Yes", - "citation": "reference citation", + "first_name": "test", + "last_name": "test", + "degree": "aff", + "identifier": "identifier", + "identifier_scheme": "scheme", + "identifier_scheme_uri": "uri", + "affiliation": "aff", + "affiliation_identifier": "identifier", + "affiliation_identifier_scheme": "scheme", + "affiliation_identifier_scheme_uri": "uri", + "role": "chair", } ], ) assert arm_response.status_code == 201 - assert avail_ipd_response.status_code == 201 assert cc_response.status_code == 201 assert location_response.status_code == 201 assert id_response.status_code == 201 assert 
intervention_response.status_code == 201 - assert link_response.status_code == 201 assert of_response.status_code == 201 - assert reference_response.status_code == 201 + assert collaborators_response.status_code == 201 + assert conditions_response.status_code == 201 + assert keywords_response.status_code == 201 response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" @@ -363,31 +396,18 @@ def test_get_version_study_metadata(clients): response_data = json.loads(response.data) admin_response_data = json.loads(admin_response.data) editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(editor_response.data) - assert response_data["available_ipd"][0]["identifier"] == "identifier1" - assert response_data["available_ipd"][0]["url"] == "google.com" assert response_data["arms"][0]["label"] == "Label1" - - assert response_data["contacts"][0]["name"] == "central-contact" - assert response_data["contacts"][0]["affiliation"] == "affiliation" - - assert response_data["secondary_identifiers"][0]["identifier"] == "test" - assert response_data["secondary_identifiers"][0]["identifier_type"] == "test" - assert response_data["interventions"][0]["type"] == "Device" - assert response_data["interventions"][0]["name"] == "name test" - assert response_data["links"][0]["title"] == "google link" - assert response_data["links"][0]["url"] == "google.com" - assert response_data["locations"][0]["country"] == "yes" - assert response_data["locations"][0]["facility"] == "test" - assert response_data["overall_officials"][0]["name"] == "test" - assert response_data["overall_officials"][0]["role"] == "Study Chair" - assert response_data["overall_officials"][0]["affiliation"] == "aff" - assert response_data["references"][0]["identifier"] == "reference identifier" - assert response_data["references"][0]["citation"] == "reference citation" - + assert response_data["central_contacts"][0]["phone"] == "808" + assert 
response_data["central_contacts"][0]["first_name"] == "central-contact" + assert response_data["central_contacts"][0]["last_name"] == "central-contact" + assert response_data["central_contacts"][0]["affiliation"] == "affiliation" + assert response_data["collaborators"][0]["name"] == "collaborator1123" + assert response_data["conditions"][0]["name"] == "condition" + assert response_data["keywords"][0]["name"] == "keywords" assert response_data["description"]["brief_summary"] == "editor-brief_summary" assert response_data["design"]["design_allocation"] == "editor-dfasdfasd" - assert response_data["design"]["study_type"] == "Interventional" assert response_data["design"]["design_intervention_model"] == "Treatment" assert response_data["design"]["design_primary_purpose"] == "Parallel Assignment" @@ -408,60 +428,48 @@ def test_get_version_study_metadata(clients): assert response_data["design"]["design_time_perspective_list"] == ["Other"] assert response_data["design"]["bio_spec_retention"] == "None Retained" assert response_data["design"]["target_duration"] == "rewrwe" - assert response_data["design"]["number_groups_cohorts"] == 1 - assert response_data["eligibility"]["gender"] == "All" + assert response_data["design"]["is_patient_registry"] == "yes" + assert response_data["eligibility"]["sex"] == "All" assert response_data["eligibility"]["gender_based"] == "Yes" assert response_data["eligibility"]["minimum_age_value"] == 18 assert response_data["primary_identifier"]["identifier"] == "test" assert response_data["primary_identifier"]["identifier_type"] == "test" - assert response_data["status"]["overall_status"] == "Withdrawn" - assert response_data["status"]["start_date"] == "2023-11-15 00:00:00" + assert response_data["secondary_identifiers"][0]["identifier"] == "test" + assert response_data["secondary_identifiers"][0]["identifier_type"] == "test" + assert response_data["interventions"][0]["type"] == "Device" + assert response_data["interventions"][0]["name"] == "name 
test" + assert response_data["locations"][0]["country"] == "yes" + assert response_data["locations"][0]["facility"] == "test" + assert response_data["overall_officials"][0]["first_name"] == "test" + assert response_data["overall_officials"][0]["last_name"] == "test" + assert response_data["overall_officials"][0]["role"] == "chair" + assert response_data["overall_officials"][0]["affiliation"] == "aff" + assert response_data["oversight"]["fda_regulated_drug"] == "drug" + assert response_data["oversight"]["fda_regulated_device"] == "device" + assert response_data["oversight"]["has_dmc"] == "yes" + assert response_data["oversight"]["human_subject_review_status"] == "yes" + assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" assert ( - response_data["sponsors"]["responsible_party_investigator_name"] - == "editor sponsor name" + response_data["sponsors"]["responsible_party_investigator_first_name"] == "name" ) - assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" - assert response_data["sponsors"]["lead_sponsor_name"] == "editor sponsor name" - assert response_data["collaborators"] == ["editor-collaborator1123"] - assert response_data["conditions"] == [ - "true", - "editor-conditions string", - "editor-keywords string", - "editor-size string", - ] - - assert response_data["ipd_sharing"]["ipd_sharing"] == "Yes" - assert response_data["ipd_sharing"]["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - - assert response_data["oversight"] is True + assert ( + response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert response_data["sponsors"]["lead_sponsor_name"] == "name" + assert response_data["status"]["overall_status"] == "Withdrawn" + assert response_data["status"]["start_date"] == "2023-11-15 00:00:00" - assert admin_response_data["available_ipd"][0]["identifier"] == "identifier1" - assert admin_response_data["available_ipd"][0]["url"] == "google.com" assert 
admin_response_data["arms"][0]["label"] == "Label1" - - assert admin_response_data["contacts"][0]["name"] == "central-contact" - assert admin_response_data["contacts"][0]["affiliation"] == "affiliation" - - assert admin_response_data["secondary_identifiers"][0]["identifier"] == "test" - assert admin_response_data["secondary_identifiers"][0]["identifier_type"] == "test" - assert admin_response_data["interventions"][0]["type"] == "Device" - assert admin_response_data["interventions"][0]["name"] == "name test" - assert admin_response_data["links"][0]["title"] == "google link" - assert admin_response_data["links"][0]["url"] == "google.com" - assert admin_response_data["locations"][0]["country"] == "yes" - assert admin_response_data["locations"][0]["facility"] == "test" - assert admin_response_data["overall_officials"][0]["name"] == "test" - assert admin_response_data["overall_officials"][0]["role"] == "Study Chair" - assert admin_response_data["overall_officials"][0]["affiliation"] == "aff" - assert admin_response_data["references"][0]["identifier"] == "reference identifier" - assert admin_response_data["references"][0]["citation"] == "reference citation" - + assert admin_response_data["central_contacts"][0]["phone"] == "808" + assert admin_response_data["central_contacts"][0]["first_name"] == "central-contact" + assert admin_response_data["central_contacts"][0]["last_name"] == "central-contact" + assert admin_response_data["central_contacts"][0]["affiliation"] == "affiliation" + assert admin_response_data["collaborators"][0]["name"] == "collaborator1123" + assert admin_response_data["conditions"][0]["name"] == "condition" + assert admin_response_data["keywords"][0]["name"] == "keywords" assert admin_response_data["description"]["brief_summary"] == "editor-brief_summary" assert admin_response_data["design"]["design_allocation"] == "editor-dfasdfasd" - assert admin_response_data["design"]["study_type"] == "Interventional" assert 
admin_response_data["design"]["design_intervention_model"] == "Treatment" assert ( @@ -484,62 +492,53 @@ def test_get_version_study_metadata(clients): assert admin_response_data["design"]["design_time_perspective_list"] == ["Other"] assert admin_response_data["design"]["bio_spec_retention"] == "None Retained" assert admin_response_data["design"]["target_duration"] == "rewrwe" - assert admin_response_data["design"]["number_groups_cohorts"] == 1 - assert admin_response_data["eligibility"]["gender"] == "All" + assert admin_response_data["design"]["is_patient_registry"] == "yes" + assert admin_response_data["eligibility"]["sex"] == "All" assert admin_response_data["eligibility"]["gender_based"] == "Yes" assert admin_response_data["eligibility"]["minimum_age_value"] == 18 assert admin_response_data["primary_identifier"]["identifier"] == "test" assert admin_response_data["primary_identifier"]["identifier_type"] == "test" - assert admin_response_data["status"]["overall_status"] == "Withdrawn" - assert admin_response_data["status"]["start_date"] == "2023-11-15 00:00:00" + assert admin_response_data["secondary_identifiers"][0]["identifier"] == "test" + assert admin_response_data["secondary_identifiers"][0]["identifier_type"] == "test" + assert admin_response_data["interventions"][0]["type"] == "Device" + assert admin_response_data["interventions"][0]["name"] == "name test" + assert admin_response_data["locations"][0]["country"] == "yes" + assert admin_response_data["locations"][0]["facility"] == "test" + assert admin_response_data["overall_officials"][0]["first_name"] == "test" + assert admin_response_data["overall_officials"][0]["last_name"] == "test" + assert admin_response_data["overall_officials"][0]["role"] == "chair" + assert admin_response_data["overall_officials"][0]["affiliation"] == "aff" + assert admin_response_data["oversight"]["fda_regulated_drug"] == "drug" + assert admin_response_data["oversight"]["fda_regulated_device"] == "device" + assert 
admin_response_data["oversight"]["has_dmc"] == "yes" + assert admin_response_data["oversight"]["human_subject_review_status"] == "yes" + assert admin_response_data["sponsors"]["responsible_party_type"] == "Sponsor" assert ( - admin_response_data["sponsors"]["responsible_party_investigator_name"] - == "editor sponsor name" + admin_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" ) - assert admin_response_data["sponsors"]["responsible_party_type"] == "Sponsor" - assert admin_response_data["sponsors"]["lead_sponsor_name"] == "editor sponsor name" - assert admin_response_data["collaborators"] == ["editor-collaborator1123"] - assert admin_response_data["conditions"] == [ - "true", - "editor-conditions string", - "editor-keywords string", - "editor-size string", - ] - - assert admin_response_data["ipd_sharing"]["ipd_sharing"] == "Yes" - assert admin_response_data["ipd_sharing"]["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", - ] - - assert admin_response_data["oversight"] is True + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert admin_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert admin_response_data["status"]["overall_status"] == "Withdrawn" + assert admin_response_data["status"]["start_date"] == "2023-11-15 00:00:00" - assert editor_response_data["available_ipd"][0]["identifier"] == "identifier1" - assert editor_response_data["available_ipd"][0]["url"] == "google.com" assert editor_response_data["arms"][0]["label"] == "Label1" - - assert editor_response_data["contacts"][0]["name"] == "central-contact" - assert editor_response_data["contacts"][0]["affiliation"] == "affiliation" - - assert editor_response_data["secondary_identifiers"][0]["identifier"] == "test" - assert editor_response_data["secondary_identifiers"][0]["identifier_type"] == "test" - assert editor_response_data["interventions"][0]["type"] == "Device" - assert 
editor_response_data["interventions"][0]["name"] == "name test" - assert editor_response_data["links"][0]["title"] == "google link" - assert editor_response_data["links"][0]["url"] == "google.com" - assert editor_response_data["locations"][0]["country"] == "yes" - assert editor_response_data["locations"][0]["facility"] == "test" - assert editor_response_data["overall_officials"][0]["name"] == "test" - assert editor_response_data["overall_officials"][0]["role"] == "Study Chair" - assert editor_response_data["overall_officials"][0]["affiliation"] == "aff" - assert editor_response_data["references"][0]["identifier"] == "reference identifier" - assert editor_response_data["references"][0]["citation"] == "reference citation" - + assert editor_response_data["central_contacts"][0]["phone"] == "808" + assert ( + editor_response_data["central_contacts"][0]["first_name"] == "central-contact" + ) + assert editor_response_data["central_contacts"][0]["last_name"] == "central-contact" + assert editor_response_data["central_contacts"][0]["affiliation"] == "affiliation" + assert editor_response_data["collaborators"][0]["name"] == "collaborator1123" + assert editor_response_data["conditions"][0]["name"] == "condition" + assert editor_response_data["keywords"][0]["name"] == "keywords" assert ( editor_response_data["description"]["brief_summary"] == "editor-brief_summary" ) assert editor_response_data["design"]["design_allocation"] == "editor-dfasdfasd" - assert editor_response_data["design"]["study_type"] == "Interventional" assert editor_response_data["design"]["design_intervention_model"] == "Treatment" assert ( @@ -563,37 +562,108 @@ def test_get_version_study_metadata(clients): assert editor_response_data["design"]["design_time_perspective_list"] == ["Other"] assert editor_response_data["design"]["bio_spec_retention"] == "None Retained" assert editor_response_data["design"]["target_duration"] == "rewrwe" - assert editor_response_data["design"]["number_groups_cohorts"] == 1 - 
assert editor_response_data["eligibility"]["gender"] == "All" + assert editor_response_data["design"]["is_patient_registry"] == "yes" + assert editor_response_data["eligibility"]["sex"] == "All" assert editor_response_data["eligibility"]["gender_based"] == "Yes" assert editor_response_data["eligibility"]["minimum_age_value"] == 18 assert editor_response_data["primary_identifier"]["identifier"] == "test" assert editor_response_data["primary_identifier"]["identifier_type"] == "test" + assert editor_response_data["secondary_identifiers"][0]["identifier"] == "test" + assert editor_response_data["secondary_identifiers"][0]["identifier_type"] == "test" + assert editor_response_data["interventions"][0]["type"] == "Device" + assert editor_response_data["interventions"][0]["name"] == "name test" + assert editor_response_data["locations"][0]["country"] == "yes" + assert editor_response_data["locations"][0]["facility"] == "test" + assert editor_response_data["overall_officials"][0]["first_name"] == "test" + assert editor_response_data["overall_officials"][0]["last_name"] == "test" + assert editor_response_data["overall_officials"][0]["role"] == "chair" + assert editor_response_data["overall_officials"][0]["affiliation"] == "aff" + assert editor_response_data["oversight"]["fda_regulated_drug"] == "drug" + assert editor_response_data["oversight"]["fda_regulated_device"] == "device" + assert editor_response_data["oversight"]["has_dmc"] == "yes" + assert editor_response_data["oversight"]["human_subject_review_status"] == "yes" + assert editor_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert editor_response_data["sponsors"]["lead_sponsor_name"] == "name" assert editor_response_data["status"]["overall_status"] == "Withdrawn" assert 
editor_response_data["status"]["start_date"] == "2023-11-15 00:00:00" + + assert viewer_response_data["arms"][0]["label"] == "Label1" + assert viewer_response_data["central_contacts"][0]["phone"] == "808" assert ( - editor_response_data["sponsors"]["responsible_party_investigator_name"] - == "editor sponsor name" + viewer_response_data["central_contacts"][0]["first_name"] == "central-contact" ) - assert editor_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert viewer_response_data["central_contacts"][0]["last_name"] == "central-contact" + assert viewer_response_data["central_contacts"][0]["affiliation"] == "affiliation" + assert viewer_response_data["collaborators"][0]["name"] == "collaborator1123" + assert viewer_response_data["conditions"][0]["name"] == "condition" + assert viewer_response_data["keywords"][0]["name"] == "keywords" + assert ( + viewer_response_data["description"]["brief_summary"] == "editor-brief_summary" + ) + assert viewer_response_data["design"]["design_allocation"] == "editor-dfasdfasd" + assert viewer_response_data["design"]["study_type"] == "Interventional" + assert viewer_response_data["design"]["design_intervention_model"] == "Treatment" assert ( - editor_response_data["sponsors"]["lead_sponsor_name"] == "editor sponsor name" - ) - assert editor_response_data["collaborators"] == ["editor-collaborator1123"] - assert editor_response_data["conditions"] == [ - "true", - "editor-conditions string", - "editor-keywords string", - "editor-size string", + viewer_response_data["design"]["design_primary_purpose"] + == "Parallel Assignment" + ) + assert viewer_response_data["design"]["design_masking"] == "Double" + assert viewer_response_data["design"]["design_masking_description"] == "tewsfdasf" + assert viewer_response_data["design"]["design_who_masked_list"] == [ + "Participant", + "Care Provider", ] - - assert editor_response_data["ipd_sharing"]["ipd_sharing"] == "Yes" - assert 
editor_response_data["ipd_sharing"]["ipd_sharing_info_type_list"] == [ - "Study Protocol", - "Analytical Code", + assert viewer_response_data["design"]["phase_list"] == ["N/A"] + assert viewer_response_data["design"]["enrollment_count"] == 3 + assert viewer_response_data["design"]["enrollment_type"] == "Actual" + assert viewer_response_data["design"]["number_arms"] == 2 + assert viewer_response_data["design"]["design_observational_model_list"] == [ + "Cohort", + "Case-Control", ] - - assert editor_response_data["oversight"] is True + assert viewer_response_data["design"]["design_time_perspective_list"] == ["Other"] + assert viewer_response_data["design"]["bio_spec_retention"] == "None Retained" + assert viewer_response_data["design"]["target_duration"] == "rewrwe" + assert viewer_response_data["design"]["is_patient_registry"] == "yes" + assert viewer_response_data["eligibility"]["sex"] == "All" + assert viewer_response_data["eligibility"]["gender_based"] == "Yes" + assert viewer_response_data["eligibility"]["minimum_age_value"] == 18 + assert viewer_response_data["primary_identifier"]["identifier"] == "test" + assert viewer_response_data["primary_identifier"]["identifier_type"] == "test" + assert viewer_response_data["secondary_identifiers"][0]["identifier"] == "test" + assert viewer_response_data["secondary_identifiers"][0]["identifier_type"] == "test" + assert viewer_response_data["interventions"][0]["type"] == "Device" + assert viewer_response_data["interventions"][0]["name"] == "name test" + assert viewer_response_data["locations"][0]["country"] == "yes" + assert viewer_response_data["locations"][0]["facility"] == "test" + assert viewer_response_data["overall_officials"][0]["first_name"] == "test" + assert viewer_response_data["overall_officials"][0]["last_name"] == "test" + assert viewer_response_data["overall_officials"][0]["role"] == "chair" + assert viewer_response_data["overall_officials"][0]["affiliation"] == "aff" + assert 
viewer_response_data["oversight"]["fda_regulated_drug"] == "drug" + assert viewer_response_data["oversight"]["fda_regulated_device"] == "device" + assert viewer_response_data["oversight"]["has_dmc"] == "yes" + assert viewer_response_data["oversight"]["human_subject_review_status"] == "yes" + assert viewer_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + viewer_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + viewer_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert viewer_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert viewer_response_data["status"]["overall_status"] == "Withdrawn" + assert viewer_response_data["status"]["start_date"] == "2023-11-15 00:00:00" def test_get_version_dataset_metadata(clients): @@ -675,6 +745,7 @@ def test_get_version_dataset_metadata(clients): { "identifier": "Identifier", "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", "rights": "Rights", "uri": "URI", "license_text": "license text", @@ -712,7 +783,7 @@ def test_get_version_dataset_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -791,8 +862,12 @@ def test_get_version_dataset_metadata(clients): assert response_data["identifiers"][0]["identifier"] == "identifier test" assert response_data["identifiers"][0]["type"] == "ARK" - assert response_data["related_identifier"][0]["identifier"] == "editor test identifier" - assert response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert ( + response_data["related_identifier"][0]["identifier"] == "editor test identifier" + ) + assert ( + response_data["related_identifier"][0]["relation_type"] == "test relation type" + ) assert response_data["related_identifier"][0]["resource_type"] == "test" assert 
admin_response_data["contributors"][0]["given_name"] == "Given Name here" @@ -828,8 +903,14 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["identifiers"][0]["identifier"] == "identifier test" assert admin_response_data["identifiers"][0]["type"] == "ARK" - assert admin_response_data["related_identifier"][0]["identifier"] == "editor test identifier" - assert admin_response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert ( + admin_response_data["related_identifier"][0]["identifier"] + == "editor test identifier" + ) + assert ( + admin_response_data["related_identifier"][0]["relation_type"] + == "test relation type" + ) assert admin_response_data["related_identifier"][0]["resource_type"] == "test" assert editor_response_data["contributors"][0]["family_name"] == "Family Name here" @@ -865,8 +946,14 @@ def test_get_version_dataset_metadata(clients): assert editor_response_data["identifiers"][0]["identifier"] == "identifier test" assert editor_response_data["identifiers"][0]["type"] == "ARK" - assert editor_response_data["related_identifier"][0]["identifier"] == "editor test identifier" - assert editor_response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert ( + editor_response_data["related_identifier"][0]["identifier"] + == "editor test identifier" + ) + assert ( + editor_response_data["related_identifier"][0]["relation_type"] + == "test relation type" + ) assert editor_response_data["related_identifier"][0]["resource_type"] == "test" From 6a41186975411c1221894b7fe18c12c9530a205b Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Mon, 11 Mar 2024 16:23:05 -0700 Subject: [PATCH 449/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20study=20metadata?= =?UTF-8?q?=20database=20updates=20(#51)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: study oversight table nullability * fix: study intervention 
remove a column * fix: study intervention test * fix: change study location table name --- apis/study.py | 2 +- apis/study_metadata/study_intervention.py | 9 +-------- apis/study_metadata/study_oversight.py | 6 +++--- model/study_metadata/study_intervention.py | 3 --- .../study_location_contact_list.py | 2 +- model/study_metadata/study_oversight.py | 8 ++++---- tests/functional/test_study_metadata_api.py | 19 ------------------- tests/functional/test_study_version_api.py | 1 - 8 files changed, 10 insertions(+), 40 deletions(-) diff --git a/apis/study.py b/apis/study.py index c20ad6a7..19768204 100644 --- a/apis/study.py +++ b/apis/study.py @@ -120,7 +120,7 @@ def put(self, study_id: int): "properties": { "title": {"type": "string", "minLength": 1}, "image": {"type": "string", "minLength": 1}, - "acronym": {"type": "string", "minLength": 1, "maxLength": 14}, + "acronym": {"type": "string", "maxLength": 14}, }, } diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index e53fa828..fb3641a6 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -18,7 +18,6 @@ "type": fields.String(required=True), "name": fields.String(required=True), "description": fields.String(required=True), - "arm_group_label_list": fields.List(fields.String, required=True), "other_name_list": fields.List(fields.String, required=True), }, ) @@ -76,19 +75,13 @@ def post(self, study_id: int): }, "name": {"type": "string", "minLength": 1}, "description": {"type": "string"}, - "arm_group_label_list": { - "type": "array", - "items": {"type": "string", "minLength": 1}, - "minItems": 1, - "uniqueItems": True, - }, "other_name_list": { "type": "array", "items": {"type": "string", "minLength": 1}, "uniqueItems": True, }, }, - "required": ["name", "type", "arm_group_label_list"], + "required": ["name", "type"], }, "uniqueItems": True, } diff --git a/apis/study_metadata/study_oversight.py 
b/apis/study_metadata/study_oversight.py index 6184f9af..71eded68 100644 --- a/apis/study_metadata/study_oversight.py +++ b/apis/study_metadata/study_oversight.py @@ -47,9 +47,9 @@ def put(self, study_id: int): "type": "object", "additionalProperties": False, "properties": { - "fda_regulated_drug": {"type": "string", "minLength": 1}, - "fda_regulated_device": {"type": "string", "minLength": 1}, - "has_dmc": {"type": "string"}, + "fda_regulated_drug": {"type": ["string", "null"], "minLength": 1}, + "fda_regulated_device": {"type": ["string", "null"], "minLength": 1}, + "has_dmc": {"type": ["string", "null"]}, "human_subject_review_status": {"type": "string"}, }, "required": [ diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index bb946cd2..89ff4b90 100644 --- a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -24,7 +24,6 @@ def __init__(self, study): type = db.Column(db.String, nullable=True) name = db.Column(db.String, nullable=False) description = db.Column(db.String, nullable=False) - arm_group_label_list = db.Column(ARRAY(String), nullable=False) other_name_list = db.Column(ARRAY(String), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) @@ -40,7 +39,6 @@ def to_dict(self): "type": self.type, "name": self.name, "description": self.description, - "arm_group_label_list": self.arm_group_label_list, "other_name_list": self.other_name_list, "created_at": self.created_at, } @@ -66,7 +64,6 @@ def update(self, data: dict): self.type = data["type"] self.name = data["name"] self.description = data["description"] - self.arm_group_label_list = data["arm_group_label_list"] self.other_name_list = data["other_name_list"] self.study.touch() diff --git a/model/study_metadata/study_location_contact_list.py b/model/study_metadata/study_location_contact_list.py index c6895364..9b853d90 100644 --- a/model/study_metadata/study_location_contact_list.py +++ 
b/model/study_metadata/study_location_contact_list.py @@ -15,7 +15,7 @@ def __init__(self, study): self.study = study self.created_at = datetime.datetime.now(timezone.utc).timestamp() - __tablename__ = "study_location_location_list" + __tablename__ = "study_location_contact_list" id = db.Column(db.CHAR(36), primary_key=True) first_name = db.Column(db.String, nullable=False) diff --git a/model/study_metadata/study_oversight.py b/model/study_metadata/study_oversight.py index 98e95e41..3fc0c0cb 100644 --- a/model/study_metadata/study_oversight.py +++ b/model/study_metadata/study_oversight.py @@ -15,10 +15,10 @@ def __init__(self, study): __tablename__ = "study_oversight" - fda_regulated_drug = db.Column(db.String, nullable=False) - fda_regulated_device = db.Column(db.String, nullable=False) - human_subject_review_status = db.Column(db.String, nullable=False) - has_dmc = db.Column(db.String, nullable=False) + fda_regulated_drug = db.Column(db.String, nullable=True) + fda_regulated_device = db.Column(db.String, nullable=True) + human_subject_review_status = db.Column(db.String, nullable=True) + has_dmc = db.Column(db.String, nullable=True) study_id = db.Column( db.CHAR(36), diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index 8a9623ea..d3378152 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -2247,7 +2247,6 @@ def test_post_intervention_metadata(clients): "type": "Device", "name": "name test", "description": "desc", - "arm_group_label_list": ["test", "one"], "other_name_list": ["uhh", "yes"], } ], @@ -2262,7 +2261,6 @@ def test_post_intervention_metadata(clients): assert response_data[0]["type"] == "Device" assert response_data[0]["name"] == "name test" assert response_data[0]["description"] == "desc" - assert response_data[0]["arm_group_label_list"] == ["test", "one"] assert response_data[0]["other_name_list"] == ["uhh", "yes"] admin_response = 
_admin_client.post( @@ -2272,7 +2270,6 @@ def test_post_intervention_metadata(clients): "type": "Device", "name": "admin-name test", "description": "desc", - "arm_group_label_list": ["test", "one"], "other_name_list": ["uhh", "yes"], } ], @@ -2287,7 +2284,6 @@ def test_post_intervention_metadata(clients): assert admin_response_data[0]["type"] == "Device" assert admin_response_data[0]["name"] == "admin-name test" assert admin_response_data[0]["description"] == "desc" - assert admin_response_data[0]["arm_group_label_list"] == ["test", "one"] assert admin_response_data[0]["other_name_list"] == ["uhh", "yes"] editor_response = _editor_client.post( @@ -2297,7 +2293,6 @@ def test_post_intervention_metadata(clients): "type": "Device", "name": "editor-name test", "description": "desc", - "arm_group_label_list": ["test", "one"], "other_name_list": ["uhh", "yes"], } ], @@ -2310,7 +2305,6 @@ def test_post_intervention_metadata(clients): assert editor_response_data[0]["type"] == "Device" assert editor_response_data[0]["name"] == "editor-name test" assert editor_response_data[0]["description"] == "desc" - assert editor_response_data[0]["arm_group_label_list"] == ["test", "one"] assert editor_response_data[0]["other_name_list"] == ["uhh", "yes"] viewer_response = _viewer_client.post( @@ -2320,7 +2314,6 @@ def test_post_intervention_metadata(clients): "type": "Device", "name": "viewer-name test", "description": "desc", - "arm_group_label_list": ["test", "one"], "other_name_list": ["uhh", "yes"], } ], @@ -2356,65 +2349,53 @@ def test_get_intervention_metadata(clients): assert response_data[0]["type"] == "Device" assert response_data[0]["name"] == "name test" assert response_data[0]["description"] == "desc" - assert response_data[0]["arm_group_label_list"] == ["test", "one"] assert response_data[0]["other_name_list"] == ["uhh", "yes"] assert response_data[1]["type"] == "Device" assert response_data[1]["name"] == "admin-name test" assert response_data[1]["description"] == "desc" - 
assert response_data[1]["arm_group_label_list"] == ["test", "one"] assert response_data[1]["other_name_list"] == ["uhh", "yes"] assert response_data[2]["type"] == "Device" assert response_data[2]["name"] == "editor-name test" assert response_data[2]["description"] == "desc" - assert response_data[2]["arm_group_label_list"] == ["test", "one"] assert response_data[2]["other_name_list"] == ["uhh", "yes"] assert admin_response_data[0]["type"] == "Device" assert admin_response_data[0]["name"] == "name test" assert admin_response_data[0]["description"] == "desc" - assert admin_response_data[0]["arm_group_label_list"] == ["test", "one"] assert admin_response_data[0]["other_name_list"] == ["uhh", "yes"] assert admin_response_data[1]["type"] == "Device" assert admin_response_data[1]["name"] == "admin-name test" assert admin_response_data[1]["description"] == "desc" - assert admin_response_data[1]["arm_group_label_list"] == ["test", "one"] assert admin_response_data[1]["other_name_list"] == ["uhh", "yes"] assert admin_response_data[2]["type"] == "Device" assert admin_response_data[2]["name"] == "editor-name test" assert admin_response_data[2]["description"] == "desc" - assert admin_response_data[2]["arm_group_label_list"] == ["test", "one"] assert admin_response_data[2]["other_name_list"] == ["uhh", "yes"] assert editor_response_data[0]["type"] == "Device" assert editor_response_data[0]["name"] == "name test" assert editor_response_data[0]["description"] == "desc" - assert editor_response_data[0]["arm_group_label_list"] == ["test", "one"] assert editor_response_data[0]["other_name_list"] == ["uhh", "yes"] assert editor_response_data[1]["type"] == "Device" assert editor_response_data[1]["name"] == "admin-name test" assert editor_response_data[1]["description"] == "desc" - assert editor_response_data[1]["arm_group_label_list"] == ["test", "one"] assert editor_response_data[1]["other_name_list"] == ["uhh", "yes"] assert editor_response_data[2]["type"] == "Device" assert 
editor_response_data[2]["name"] == "editor-name test" assert editor_response_data[2]["description"] == "desc" - assert editor_response_data[2]["arm_group_label_list"] == ["test", "one"] assert editor_response_data[2]["other_name_list"] == ["uhh", "yes"] assert viewer_response_data[0]["type"] == "Device" assert viewer_response_data[0]["name"] == "name test" assert viewer_response_data[0]["description"] == "desc" - assert viewer_response_data[0]["arm_group_label_list"] == ["test", "one"] assert viewer_response_data[0]["other_name_list"] == ["uhh", "yes"] assert viewer_response_data[1]["type"] == "Device" assert viewer_response_data[1]["name"] == "admin-name test" assert viewer_response_data[1]["description"] == "desc" - assert viewer_response_data[1]["arm_group_label_list"] == ["test", "one"] assert viewer_response_data[1]["other_name_list"] == ["uhh", "yes"] assert viewer_response_data[2]["type"] == "Device" assert viewer_response_data[2]["name"] == "editor-name test" assert viewer_response_data[2]["description"] == "desc" - assert viewer_response_data[2]["arm_group_label_list"] == ["test", "one"] assert viewer_response_data[2]["other_name_list"] == ["uhh", "yes"] diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index d0433a6b..8888f0a4 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -306,7 +306,6 @@ def test_get_version_study_metadata(clients): "type": "Device", "name": "name test", "description": "desc", - "arm_group_label_list": ["test", "one"], "other_name_list": ["uhh", "yes"], } ], From 1837d37aa40963735ff8849b6822966961088586 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 11 Mar 2024 16:48:16 -0700 Subject: [PATCH 450/505] =?UTF-8?q?=F0=9F=94=A8=20chore:=20update=20delete?= =?UTF-8?q?=20script?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dev/drop_tables.sql | 14 ++++++++------ 1 file 
changed, 8 insertions(+), 6 deletions(-) diff --git a/dev/drop_tables.sql b/dev/drop_tables.sql index ad8e1b08..1e9baea9 100644 --- a/dev/drop_tables.sql +++ b/dev/drop_tables.sql @@ -20,13 +20,19 @@ DROP TABLE IF EXISTS study_location CASCADE; DROP TABLE IF EXISTS study_other CASCADE; DROP TABLE IF EXISTS study_overall_official CASCADE; DROP TABLE IF EXISTS study_reference CASCADE; -DROP TABLE IF EXISTS study_sponsors_collaborators CASCADE; +DROP TABLE IF EXISTS study_central_contact CASCADE; +DROP TABLE IF EXISTS study_sponsors CASCADE; +DROP TABLE IF EXISTS study_collaborators CASCADE; +DROP TABLE IF EXISTS study_conditions CASCADE; +DROP TABLE IF EXISTS study_keywords CASCADE; +DROP TABLE IF EXISTS study_location_contact_list CASCADE; +DROP TABLE IF EXISTS study_oversight CASCADE; DROP TABLE IF EXISTS study_status CASCADE; DROP TABLE IF EXISTS study_redcap CASCADE; DROP TABLE IF EXISTS token_blacklist CASCADE; DROP TABLE IF EXISTS user_details CASCADE; DROP TABLE IF EXISTS dataset_contributor CASCADE; -DROP TABLE IF EXISTS dataset_related_item CASCADE; +DROP TABLE IF EXISTS dataset_related_identifier CASCADE; DROP TABLE IF EXISTS dataset_access CASCADE; DROP TABLE IF EXISTS dataset_alternate_identifier CASCADE; DROP TABLE IF EXISTS dataset_consent CASCADE; @@ -37,10 +43,6 @@ DROP TABLE IF EXISTS dataset_funder CASCADE; DROP TABLE IF EXISTS dataset_healthsheet CASCADE; DROP TABLE IF EXISTS dataset_other CASCADE; DROP TABLE IF EXISTS dataset_record_keys CASCADE; -DROP TABLE IF EXISTS dataset_related_item_contributor CASCADE; -DROP TABLE IF EXISTS dataset_related_item_identifier CASCADE; -DROP TABLE IF EXISTS dataset_related_item_other CASCADE; -DROP TABLE IF EXISTS dataset_related_item_title CASCADE; DROP TABLE IF EXISTS dataset_rights CASCADE; DROP TABLE IF EXISTS dataset_subject CASCADE; DROP TABLE IF EXISTS dataset_title CASCADE; From 8b7c6b15bb8aefc5af6ec79fdc234d787876d06c Mon Sep 17 00:00:00 2001 From: Aydan Gasimova 
<62059163+Aydawka@users.noreply.github.com> Date: Wed, 13 Mar 2024 20:31:11 -0700 Subject: [PATCH 451/505] =?UTF-8?q?fix:=20=F0=9F=90=9E=20=20version=20meta?= =?UTF-8?q?data=20changes=20(#52)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: schema types for study metadata * fix: metadata return for version steps * fix: test version and collaborators metadata --- apis/study_metadata/study_central_contact.py | 10 ++-- apis/study_metadata/study_collaborators.py | 1 - apis/study_metadata/study_conditions.py | 2 +- apis/study_metadata/study_keywords.py | 2 +- model/dataset_metadata/dataset_contributor.py | 4 +- model/study_metadata/study_collaborators.py | 4 +- model/study_metadata/study_eligibility.py | 2 +- tests/functional/test_study_metadata_api.py | 60 +++++++++---------- tests/functional/test_study_version_api.py | 32 +++++----- 9 files changed, 58 insertions(+), 59 deletions(-) diff --git a/apis/study_metadata/study_central_contact.py b/apis/study_metadata/study_central_contact.py index a3865e94..e7f5da34 100644 --- a/apis/study_metadata/study_central_contact.py +++ b/apis/study_metadata/study_central_contact.py @@ -83,14 +83,14 @@ def validate_is_valid_email(instance): "id": {"type": "string"}, "first_name": {"type": "string", "minLength": 1}, "last_name": {"type": "string", "minLength": 1}, - "degree": {"type": "string", "minLength": 1}, - "identifier": {"type": "string", "minLength": 1}, - "identifier_scheme": {"type": "string", "minLength": 1}, - "identifier_scheme_uri": {"type": "string", "minLength": 1}, + "degree": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, "affiliation": {"type": "string", "minLength": 1}, "affiliation_identifier": { "type": "string", - "minLength": 1, + }, "affiliation_identifier_scheme": { "type": "string", diff --git a/apis/study_metadata/study_collaborators.py 
b/apis/study_metadata/study_collaborators.py index 63840079..018f3062 100644 --- a/apis/study_metadata/study_collaborators.py +++ b/apis/study_metadata/study_collaborators.py @@ -59,7 +59,6 @@ def post(self, study_id: int): "name", "identifier", "identifier_scheme", - "identifier_scheme_uri", ], }, } diff --git a/apis/study_metadata/study_conditions.py b/apis/study_metadata/study_conditions.py index d0762ab1..c0dfcfcb 100644 --- a/apis/study_metadata/study_conditions.py +++ b/apis/study_metadata/study_conditions.py @@ -53,7 +53,7 @@ def post(self, study_id: int): "properties": { "id": {"type": "string"}, "name": {"type": "string", "minLength": 1}, - "classification_code": {"type": "string", "minLength": 1}, + "classification_code": {"type": "string"}, "scheme": {"type": "string"}, "scheme_uri": {"type": "string"}, "condition_uri": {"type": "string"}, diff --git a/apis/study_metadata/study_keywords.py b/apis/study_metadata/study_keywords.py index 33bcb27d..4e6c420b 100644 --- a/apis/study_metadata/study_keywords.py +++ b/apis/study_metadata/study_keywords.py @@ -52,7 +52,7 @@ def post(self, study_id: int): "properties": { "id": {"type": "string"}, "name": {"type": "string", "minLength": 1}, - "classification_code": {"type": "string", "minLength": 1}, + "classification_code": {"type": "string"}, "scheme": {"type": "string"}, "scheme_uri": {"type": "string"}, "keyword_uri": {"type": "string"}, diff --git a/model/dataset_metadata/dataset_contributor.py b/model/dataset_metadata/dataset_contributor.py index 6ecdb0d9..e2d26108 100644 --- a/model/dataset_metadata/dataset_contributor.py +++ b/model/dataset_metadata/dataset_contributor.py @@ -45,8 +45,8 @@ def to_dict(self): def to_dict_metadata(self): return { "id": self.id, - "given_name": self.given_name, - "family_name": self.family_name, + "first_name": self.given_name, + "last_name": self.family_name, "name_type": self.name_type, "contributor_type": self.contributor_type, "creator": self.creator, diff --git 
a/model/study_metadata/study_collaborators.py b/model/study_metadata/study_collaborators.py index bcd6f64b..ca02af5d 100644 --- a/model/study_metadata/study_collaborators.py +++ b/model/study_metadata/study_collaborators.py @@ -37,8 +37,8 @@ def to_dict(self): "id": self.id, "name": self.name, "identifier": self.identifier, - "scheme": self.scheme, - "scheme_uri": self.scheme_uri, + "identifier_scheme": self.scheme, + "identifier_scheme_uri": self.scheme_uri, "created_at": self.created_at, } diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 7370383f..8c6ee872 100644 --- a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -71,7 +71,7 @@ def to_dict_metadata(self): """Converts the study metadata to a dictionary""" return { "sex": self.sex, - "minimum_age_value": self.minimum_age_value, + "maximum_age_value": self.maximum_age_value, "gender_based": self.gender_based, } diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_study_metadata_api.py index d3378152..8db5675c 100644 --- a/tests/functional/test_study_metadata_api.py +++ b/tests/functional/test_study_metadata_api.py @@ -726,8 +726,8 @@ def test_post_collaborators_metadata(clients): assert response_data[0]["name"] == "collaborator1123" assert response_data[0]["identifier"] == "collaborator1123" - assert response_data[0]["scheme"] == "collaborator1123" - assert response_data[0]["scheme_uri"] == "collaborator1123" + assert response_data[0]["identifier_scheme"] == "collaborator1123" + assert response_data[0]["identifier_scheme_uri"] == "collaborator1123" admin_response = _admin_client.post( f"/study/{study_id}/metadata/collaborators", @@ -749,8 +749,8 @@ def test_post_collaborators_metadata(clients): assert admin_response_data[0]["name"] == "admin collaborator1123" assert admin_response_data[0]["identifier"] == "collaborator1123" - assert admin_response_data[0]["scheme"] == "collaborator1123" 
- assert admin_response_data[0]["scheme_uri"] == "collaborator1123" + assert admin_response_data[0]["identifier_scheme"] == "collaborator1123" + assert admin_response_data[0]["identifier_scheme_uri"] == "collaborator1123" editor_response = _editor_client.post( f"/study/{study_id}/metadata/collaborators", @@ -770,8 +770,8 @@ def test_post_collaborators_metadata(clients): assert editor_response_data[0]["name"] == "editor collaborator1123" assert editor_response_data[0]["identifier"] == "collaborator1123" - assert editor_response_data[0]["scheme"] == "collaborator1123" - assert editor_response_data[0]["scheme_uri"] == "collaborator1123" + assert editor_response_data[0]["identifier_scheme"] == "collaborator1123" + assert editor_response_data[0]["identifier_scheme_uri"] == "collaborator1123" viewer_response = _viewer_client.post( f"/study/{study_id}/metadata/collaborators", @@ -814,63 +814,63 @@ def test_get_collaborators_metadata(clients): assert response_data[0]["name"] == "collaborator1123" assert response_data[0]["identifier"] == "collaborator1123" - assert response_data[0]["scheme"] == "collaborator1123" - assert response_data[0]["scheme_uri"] == "collaborator1123" + assert response_data[0]["identifier_scheme"] == "collaborator1123" + assert response_data[0]["identifier_scheme_uri"] == "collaborator1123" assert admin_response_data[0]["name"] == "collaborator1123" assert admin_response_data[0]["identifier"] == "collaborator1123" - assert admin_response_data[0]["scheme"] == "collaborator1123" - assert admin_response_data[0]["scheme_uri"] == "collaborator1123" + assert admin_response_data[0]["identifier_scheme"] == "collaborator1123" + assert admin_response_data[0]["identifier_scheme_uri"] == "collaborator1123" assert editor_response_data[0]["name"] == "collaborator1123" assert editor_response_data[0]["identifier"] == "collaborator1123" - assert editor_response_data[0]["scheme"] == "collaborator1123" - assert editor_response_data[0]["scheme_uri"] == "collaborator1123" 
+ assert editor_response_data[0]["identifier_scheme"] == "collaborator1123" + assert editor_response_data[0]["identifier_scheme_uri"] == "collaborator1123" assert viewer_response_data[0]["name"] == "collaborator1123" assert viewer_response_data[0]["identifier"] == "collaborator1123" - assert viewer_response_data[0]["scheme"] == "collaborator1123" - assert viewer_response_data[0]["scheme_uri"] == "collaborator1123" + assert viewer_response_data[0]["identifier_scheme"] == "collaborator1123" + assert viewer_response_data[0]["identifier_scheme_uri"] == "collaborator1123" assert response_data[1]["name"] == "admin collaborator1123" assert response_data[1]["identifier"] == "collaborator1123" - assert response_data[1]["scheme"] == "collaborator1123" - assert response_data[1]["scheme_uri"] == "collaborator1123" + assert response_data[1]["identifier_scheme"] == "collaborator1123" + assert response_data[1]["identifier_scheme_uri"] == "collaborator1123" assert admin_response_data[1]["name"] == "admin collaborator1123" assert admin_response_data[1]["identifier"] == "collaborator1123" - assert admin_response_data[1]["scheme"] == "collaborator1123" - assert admin_response_data[1]["scheme_uri"] == "collaborator1123" + assert admin_response_data[1]["identifier_scheme"] == "collaborator1123" + assert admin_response_data[1]["identifier_scheme_uri"] == "collaborator1123" assert editor_response_data[1]["name"] == "admin collaborator1123" assert editor_response_data[1]["identifier"] == "collaborator1123" - assert editor_response_data[1]["scheme"] == "collaborator1123" - assert editor_response_data[1]["scheme_uri"] == "collaborator1123" + assert editor_response_data[1]["identifier_scheme"] == "collaborator1123" + assert editor_response_data[1]["identifier_scheme_uri"] == "collaborator1123" assert viewer_response_data[1]["name"] == "admin collaborator1123" assert viewer_response_data[1]["identifier"] == "collaborator1123" - assert viewer_response_data[1]["scheme"] == "collaborator1123" - 
assert viewer_response_data[1]["scheme_uri"] == "collaborator1123" + assert viewer_response_data[1]["identifier_scheme"] == "collaborator1123" + assert viewer_response_data[1]["identifier_scheme_uri"] == "collaborator1123" assert response_data[2]["name"] == "editor collaborator1123" assert response_data[2]["identifier"] == "collaborator1123" - assert response_data[2]["scheme"] == "collaborator1123" - assert response_data[2]["scheme_uri"] == "collaborator1123" + assert response_data[2]["identifier_scheme"] == "collaborator1123" + assert response_data[2]["identifier_scheme_uri"] == "collaborator1123" assert admin_response_data[2]["name"] == "editor collaborator1123" assert admin_response_data[2]["identifier"] == "collaborator1123" - assert admin_response_data[2]["scheme"] == "collaborator1123" - assert admin_response_data[2]["scheme_uri"] == "collaborator1123" + assert admin_response_data[2]["identifier_scheme"] == "collaborator1123" + assert admin_response_data[2]["identifier_scheme_uri"] == "collaborator1123" assert editor_response_data[2]["name"] == "editor collaborator1123" assert editor_response_data[2]["identifier"] == "collaborator1123" - assert editor_response_data[2]["scheme"] == "collaborator1123" - assert editor_response_data[2]["scheme_uri"] == "collaborator1123" + assert editor_response_data[2]["identifier_scheme"] == "collaborator1123" + assert editor_response_data[2]["identifier_scheme_uri"] == "collaborator1123" assert viewer_response_data[2]["name"] == "editor collaborator1123" assert viewer_response_data[2]["identifier"] == "collaborator1123" - assert viewer_response_data[2]["scheme"] == "collaborator1123" - assert viewer_response_data[2]["scheme_uri"] == "collaborator1123" + assert viewer_response_data[2]["identifier_scheme"] == "collaborator1123" + assert viewer_response_data[2]["identifier_scheme_uri"] == "collaborator1123" def test_delete_collaborators_metadata(clients): diff --git a/tests/functional/test_study_version_api.py 
b/tests/functional/test_study_version_api.py index 8888f0a4..c016949c 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -430,7 +430,7 @@ def test_get_version_study_metadata(clients): assert response_data["design"]["is_patient_registry"] == "yes" assert response_data["eligibility"]["sex"] == "All" assert response_data["eligibility"]["gender_based"] == "Yes" - assert response_data["eligibility"]["minimum_age_value"] == 18 + assert response_data["eligibility"]["maximum_age_value"] == 61 assert response_data["primary_identifier"]["identifier"] == "test" assert response_data["primary_identifier"]["identifier_type"] == "test" assert response_data["secondary_identifiers"][0]["identifier"] == "test" @@ -494,7 +494,7 @@ def test_get_version_study_metadata(clients): assert admin_response_data["design"]["is_patient_registry"] == "yes" assert admin_response_data["eligibility"]["sex"] == "All" assert admin_response_data["eligibility"]["gender_based"] == "Yes" - assert admin_response_data["eligibility"]["minimum_age_value"] == 18 + assert admin_response_data["eligibility"]["maximum_age_value"] == 61 assert admin_response_data["primary_identifier"]["identifier"] == "test" assert admin_response_data["primary_identifier"]["identifier_type"] == "test" assert admin_response_data["secondary_identifiers"][0]["identifier"] == "test" @@ -564,7 +564,7 @@ def test_get_version_study_metadata(clients): assert editor_response_data["design"]["is_patient_registry"] == "yes" assert editor_response_data["eligibility"]["sex"] == "All" assert editor_response_data["eligibility"]["gender_based"] == "Yes" - assert editor_response_data["eligibility"]["minimum_age_value"] == 18 + assert editor_response_data["eligibility"]["maximum_age_value"] == 61 assert editor_response_data["primary_identifier"]["identifier"] == "test" assert editor_response_data["primary_identifier"]["identifier_type"] == "test" assert 
editor_response_data["secondary_identifiers"][0]["identifier"] == "test" @@ -634,7 +634,7 @@ def test_get_version_study_metadata(clients): assert viewer_response_data["design"]["is_patient_registry"] == "yes" assert viewer_response_data["eligibility"]["sex"] == "All" assert viewer_response_data["eligibility"]["gender_based"] == "Yes" - assert viewer_response_data["eligibility"]["minimum_age_value"] == 18 + assert viewer_response_data["eligibility"]["maximum_age_value"] == 61 assert viewer_response_data["primary_identifier"]["identifier"] == "test" assert viewer_response_data["primary_identifier"]["identifier_type"] == "test" assert viewer_response_data["secondary_identifiers"][0]["identifier"] == "test" @@ -819,16 +819,16 @@ def test_get_version_dataset_metadata(clients): # seach for main title index in response_data[n]["titles"] # pylint: disable=line-too-long - assert response_data["contributors"][0]["given_name"] == "Given Name here" - assert response_data["contributors"][0]["family_name"] == "Family Name here" + assert response_data["contributors"][0]["last_name"] == "Family Name here" + assert response_data["contributors"][0]["first_name"] == "Given Name here" assert response_data["contributors"][0]["name_type"] == "Personal" assert response_data["contributors"][0]["contributor_type"] == "Con Type" assert response_data["dates"][0]["date"] == "01-01-1970" assert response_data["dates"][0]["type"] == "Type" - assert response_data["creators"][0]["given_name"] == "Given Name here" - assert response_data["creators"][0]["family_name"] == "Family Name here" + assert response_data["creators"][0]["last_name"] == "Family Name here" + assert response_data["creators"][0]["first_name"] == "Given Name here" assert response_data["creators"][0]["name_type"] == "Personal" assert response_data["funders"][0]["name"] == "Name" @@ -869,14 +869,14 @@ def test_get_version_dataset_metadata(clients): ) assert response_data["related_identifier"][0]["resource_type"] == "test" - assert 
admin_response_data["contributors"][0]["given_name"] == "Given Name here" - assert admin_response_data["contributors"][0]["family_name"] == "Family Name here" + assert admin_response_data["contributors"][0]["first_name"] == "Given Name here" + assert admin_response_data["contributors"][0]["last_name"] == "Family Name here" assert admin_response_data["contributors"][0]["name_type"] == "Personal" assert admin_response_data["contributors"][0]["contributor_type"] == "Con Type" assert admin_response_data["dates"][0]["date"] == "01-01-1970" assert admin_response_data["dates"][0]["type"] == "Type" - assert admin_response_data["creators"][0]["given_name"] == "Given Name here" - assert admin_response_data["creators"][0]["family_name"] == "Family Name here" + assert admin_response_data["creators"][0]["first_name"] == "Given Name here" + assert admin_response_data["creators"][0]["last_name"] == "Family Name here" assert admin_response_data["creators"][0]["name_type"] == "Personal" assert admin_response_data["funders"][0]["name"] == "Name" assert admin_response_data["funders"][0]["identifier"] == "Identifier" @@ -912,14 +912,14 @@ def test_get_version_dataset_metadata(clients): ) assert admin_response_data["related_identifier"][0]["resource_type"] == "test" - assert editor_response_data["contributors"][0]["family_name"] == "Family Name here" - assert editor_response_data["contributors"][0]["given_name"] == "Given Name here" + assert editor_response_data["contributors"][0]["first_name"] == "Given Name here" + assert editor_response_data["contributors"][0]["last_name"] == "Family Name here" assert editor_response_data["contributors"][0]["name_type"] == "Personal" assert editor_response_data["contributors"][0]["contributor_type"] == "Con Type" assert editor_response_data["dates"][0]["date"] == "01-01-1970" assert editor_response_data["dates"][0]["type"] == "Type" - assert editor_response_data["creators"][0]["given_name"] == "Given Name here" - assert 
editor_response_data["creators"][0]["family_name"] == "Family Name here" + assert editor_response_data["creators"][0]["first_name"] == "Given Name here" + assert editor_response_data["creators"][0]["last_name"] == "Family Name here" assert editor_response_data["creators"][0]["name_type"] == "Personal" assert editor_response_data["funders"][0]["name"] == "Name" assert editor_response_data["funders"][0]["identifier"] == "Identifier" From abe68dd62f181fa99b0b541938bac5e9c0a744ad Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 19 Mar 2024 13:13:54 -0700 Subject: [PATCH 452/505] feat: public dashboard creation and management --- apis/__init__.py | 2 +- apis/dashboard.py | 316 ++++-------------- .../dataset_managing_organization.py | 7 +- .../dataset_related_identifier.py | 20 +- app.py | 41 ++- model/study_dashboard.py | 3 + modules/etl/config/aireadi_config.py | 28 +- sql/init.sql | 6 +- sql/init_timezones.sql | 6 +- sql/specific_tables.sql | 12 +- .../test_study_dataset_metadata_api.py | 8 +- tests/functional/test_study_version_api.py | 30 +- 12 files changed, 164 insertions(+), 315 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 652959de..6d2a32c0 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -18,8 +18,8 @@ from .dataset_metadata.dataset_description import api as description from .dataset_metadata.dataset_funder import api as funder from .dataset_metadata.dataset_healthsheet import api as healthsheet -from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_managing_organization import api as managing_organization +from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_related_identifier import api as related_identifier from .dataset_metadata.dataset_rights import api as rights from .dataset_metadata.dataset_subject import api as subject diff --git a/apis/dashboard.py b/apis/dashboard.py index 47192d36..9aeb4efb 100644 --- a/apis/dashboard.py +++ 
b/apis/dashboard.py @@ -49,21 +49,6 @@ }, ) -redcap_project_report_model = api.model( - "RedcapProjectReport", - { - "report_id": fields.String( - required=True, readonly=True, description="REDCap report ID" - ), - "report_key": fields.String( - required=True, readonly=True, description="REDCap report key" - ), - "report_name": fields.String( - required=True, readonly=True, description="REDCap report name" - ), - }, -) - redcap_project_dashboard_module_model = api.model( "RedcapProjectDashboardModule", { @@ -89,6 +74,22 @@ ), }, ) + +redcap_project_report_model = api.model( + "RedcapProjectReport", + { + "report_id": fields.String( + required=True, readonly=True, description="REDCap report ID" + ), + "report_key": fields.String( + required=True, readonly=True, description="REDCap report key" + ), + "report_name": fields.String( + required=True, readonly=True, description="REDCap report name" + ), + }, +) + redcap_project_dashboard_model = api.model( "RedcapProjectDashboard", { @@ -120,8 +121,12 @@ description="REDCap dashboard module", ) ), + "public": fields.Boolean( + required=True, readonly=True, description="Is this REDCap dashboard public?" + ), }, ) + redcap_project_dashboard_module_connector_model = api.model( "RedcapProjectDashboardModuleConnector", { @@ -175,6 +180,9 @@ description="REDCap dashboard module connector", ) ), + "public": fields.Boolean( + required=True, readonly=True, description="Is this REDCap dashboard public?" 
+ ), }, ) @@ -189,7 +197,7 @@ def get(self, study_id: str): """Get all REDCap project dashboards""" study = model.db.session.query(model.Study).get(study_id) if not is_granted("view", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not view", 403 redcap_project_dashboards_query = model.StudyDashboard.query.filter_by( study=study ) @@ -208,7 +216,7 @@ def post(self, study_id: str): """Create REDCap project dashboard""" study = model.Study.query.get(study_id) if not is_granted("add_dashboard", study): - return "Access denied, you can not modify", 403 + return "Access denied, you can not create", 403 # Schema validation schema = { "type": "object", @@ -219,6 +227,7 @@ def post(self, study_id: str): "reports", "name", "modules", + "public", ], "properties": { "redcap_id": {"type": "string", "minLength": 1}, @@ -257,6 +266,7 @@ def post(self, study_id: str): }, "minItems": 1, }, + "public": {"type": "boolean"}, }, } data: Union[Any, Dict[str, Any]] = request.json @@ -297,6 +307,12 @@ def post(self, study_id: str): 400, ) + if not isinstance(data["public"], bool): + return ( + f"""public must be a Boolean to connect a dashboard: + {data['public']}""", + 400, + ) connect_redcap_project_dashboard_data = model.StudyDashboard.from_data( study, data ) @@ -308,104 +324,26 @@ def post(self, study_id: str): return connect_redcap_project_dashboard, 201 -# @api.route("/study//dashboard/add") -# class AddRedcapProjectDashboard(Resource): -# @api.doc(parser=dashboard_parser) -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_dashboard_model) -# def post(self, study_id: str): -# """Create REDCap project dashboard""" -# study = model.Study.query.get(study_id) -# if not is_granted("add_dashboard", study): -# return "Access denied, you can not modify", 403 -# # Schema validation -# schema = { -# "type": "object", -# "additionalProperties": False, -# "required": [ -# "redcap_id", -# 
"reports", -# "name", -# "modules", -# ], -# "properties": { -# "redcap_id": {"type": "string", "minLength": 1}, -# "reports": { -# "type": "array", -# "items": { -# "anyOf": [ -# { -# "type": "object", -# "properties": { -# "report_id": {"type": "string", "minLength": 0}, -# "report_key": {"type": "string", "minLength": 1}, -# "report_name": {"type": "string", "minLength": 1}, -# }, -# } -# ] -# }, -# "minItems": 1, -# }, -# "name": {"type": "string", "minLength": 1}, -# "modules": { -# "type": "array", -# "items": { -# "anyOf": [ -# { -# "type": "object", -# "properties": { -# "id": {"type": "string", "minLength": 1}, -# "name": {"type": "string", "minLength": 1}, -# "selected": {"type": "boolean"}, -# "report_key": {"type": "string", "minLength": 1}, -# }, -# } -# ] -# }, -# "minItems": 1, -# }, -# }, -# } -# data: Union[Any, Dict[str, Any]] = request.json -# try: -# validate(request.json, schema) -# except ValidationError as e: -# print("validation error") -# return e.message, 400 -# if len(data["redcap_id"]) < 1: -# return ( -# f"""redcap redcap_id is required to connect a dashboard: -# {data['redcap_id']}""", -# 400, -# ) -# if len(data["reports"]) < 1: -# return ( -# f"""redcap reports are required to connect a dashboard: -# {data['reports']}""", -# 400, -# ) -# if len(data["name"]) < 1: -# return ( -# f"""dashboard name is required to connect a dashboard: -# {data['name']}""", -# 400, -# ) -# if len(data["modules"]) < 1: -# return ( -# f"""dashboard modules is required to connect a dashboard: -# {data['name']}""", -# 400, -# ) -# connect_redcap_project_dashboard_data = model.StudyDashboard.from_data( -# study, data -# ) -# model.db.session.add(connect_redcap_project_dashboard_data) -# model.db.session.commit() -# connect_redcap_project_dashboard: Dict[str, Any] = ( -# connect_redcap_project_dashboard_data.to_dict() -# ) -# return connect_redcap_project_dashboard, 201 +@api.route("/study//dashboard/public") +class RedcapProjectDashboardsPublic(Resource): + 
@api.doc("Get public study dashboards") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model, as_list=True) + def get(self, study_id: str): + """Get all REDCap project dashboards""" + study = model.db.session.query(model.Study).get(study_id) + redcap_project_dashboards_query = model.StudyDashboard.query.filter_by( + study=study + ) + redcap_project_dashboards: List[Dict[str, Any]] = [ + redcap_project_dashboard.to_dict() + for redcap_project_dashboard in redcap_project_dashboards_query + ] + public_redcap_project_dashboards: List[Dict[str, Any]] = list( + filter(lambda dashboard: dashboard["public"], redcap_project_dashboards) + ) + return public_redcap_project_dashboards, 201 @api.route("/study//dashboard//connector") @@ -418,7 +356,7 @@ def get(self, study_id: str, dashboard_id: str): """Get REDCap project dashboard connector""" study = model.db.session.query(model.Study).get(study_id) if not is_granted("view", study): - return "Access denied, you can not get this dashboard", 403 + return "Access denied, you can not view this dashboard connector", 403 # Get Dashboard Connector redcap_project_dashboard_connector_query: Any = model.db.session.query( @@ -443,7 +381,7 @@ def get(self, study_id: str, dashboard_id: str): model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) if not is_granted("view", study): - return "Access denied, you can not get this dashboard", 403 + return "Access denied, you can not view this dashboard", 403 # Retrieve Dashboard Redis Cache cached_redcap_project_dashboard = caching.cache.get( @@ -574,6 +512,7 @@ def put(self, study_id: str, dashboard_id: str): }, "minItems": 1, }, + "public": {"type": "boolean"}, }, } data: Union[Any, Dict[str, Any]] = request.json @@ -618,6 +557,12 @@ def put(self, study_id: str, dashboard_id: str): {data['name']}""", 400, ) + if not isinstance(data["public"], bool): + return ( + f"""public must be a Boolean to 
connect a dashboard: + {data['public']}""", + 400, + ) redcap_project_dashboard_query = model.StudyDashboard.query.get(dashboard_id) if redcap_project_dashboard_query is None: @@ -642,144 +587,9 @@ def delete(self, study_id: str, dashboard_id: str): """Delete REDCap project dashboard""" study = model.Study.query.get(study_id) if not is_granted("delete_dashboard", study): - return "Access denied, you can not delete this redcap project", 403 + return "Access denied, you can not delete this dashboard", 403 model.StudyDashboard.query.filter_by(id=dashboard_id).delete() model.db.session.commit() return 204 - - -# @api.route("/study//dashboard/edit") -# class EditRedcapProjectDashboard(Resource): -# @api.doc(parser=dashboard_parser) -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_dashboard_model) -# def put(self, study_id: str): -# """Update REDCap project dashboard""" -# study = model.db.session.query(model.Study).get(study_id) -# if not is_granted("update_dashboard", study): -# return "Access denied, you can not modify this dashboard", 403 -# # Schema validation -# schema = { -# "type": "object", -# "additionalProperties": False, -# "required": [ -# "redcap_id", -# "reports", -# "dashboard_id", -# "name", -# "modules", -# ], -# "properties": { -# "redcap_id": {"type": "string", "minLength": 1}, -# "reports": { -# "type": "array", -# "items": { -# "anyOf": [ -# { -# "type": "object", -# "properties": { -# "report_id": {"type": "string", "minLength": 0}, -# "report_key": {"type": "string", "minLength": 1}, -# "report_name": {"type": "string", "minLength": 1}, -# }, -# } -# ] -# }, -# "minItems": 1, -# }, -# "dashboard_id": {"type": "string", "minLength": 1}, -# "name": {"type": "string", "minLength": 1}, -# "modules": { -# "type": "array", -# "items": { -# "anyOf": [ -# { -# "type": "object", -# "properties": { -# "id": {"type": "string", "minLength": 1}, -# "name": {"type": "string", "minLength": 1}, -# 
"selected": {"type": "boolean"}, -# "report_key": {"type": "string", "minLength": 1}, -# }, -# } -# ] -# }, -# "minItems": 1, -# }, -# }, -# } -# data: Union[Any, Dict[str, Any]] = request.json -# try: -# validate(request.json, schema) -# except ValidationError as e: -# print("validation error") -# return e.message, 400 -# if len(data["redcap_id"]) < 1: -# return ( -# f"""redcap redcap_id is required to connect a dashboard: -# {data['redcap_id']}""", -# 400, -# ) -# if len(data["reports"]) < 1: -# return ( -# f"""redcap reports are required to connect a dashboard: -# {data['reports']}""", -# 400, -# ) -# if len(data["dashboard_id"]) < 1: -# return ( -# f"""dashboard dashboard_id is required to connect a dashboard: -# {data['dashboard_id']}""", -# 400, -# ) -# if len(data["name"]) < 1: -# return ( -# f"""dashboard name is required to connect a dashboard: -# {data['name']}""", -# 400, -# ) -# if len(data["modules"]) < 1: -# return ( -# f"""dashboard modules is required to connect a dashboard: -# {data['name']}""", -# 400, -# ) - -# dashboard_id = data["dashboard_id"] - -# redcap_project_dashboard_query = model.StudyDashboard.query.get(dashboard_id) -# if redcap_project_dashboard_query is None: -# return "An error occurred while updating the dashboard", 500 - -# redcap_project_dashboard_query.update(data) -# model.db.session.commit() -# update_redcap_project_dashboard: Dict[str, Any] = ( -# redcap_project_dashboard_query.to_dict() -# ) - -# # Clear Dashboard from Redis Cache -# caching.cache.delete(f"$study_id#{study_id}$dashboard_id#{dashboard_id}") - -# return update_redcap_project_dashboard, 201 - - -# @api.route("/study//dashboard/delete") -# class DeleteRedcapProjectDashboard(Resource): -# @api.doc(parser=dashboard_parser) -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_dashboard_model) -# def delete(self, study_id: str): -# """Delete REDCap project dashboard""" -# study = 
model.Study.query.get(study_id) -# if not is_granted("delete_dashboard", study): -# return "Access denied, you can not delete this redcap project", 403 - -# dashboard_id = dashboard_parser.parse_args()["dashboard_id"] -# model.StudyDashboard.query.filter_by(dashboard_id=dashboard_id).delete() -# model.db.session.commit() - -# return 204 diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index c04ce441..9319426e 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -8,7 +8,6 @@ from apis.authentication import is_granted from apis.dataset_metadata_namespace import api - dataset_managing_organization = api.model( "DatasetManagingOrganization", { @@ -51,12 +50,8 @@ def put(self, study_id: int, dataset_id: int): "managing_organization_ror_id": { "type": "string", }, - }, - "required": [ - "managing_organization_name", - "managing_organization_ror_id" - ], + "required": ["managing_organization_name", "managing_organization_ror_id"], } try: validate(instance=request.json, schema=schema) diff --git a/apis/dataset_metadata/dataset_related_identifier.py b/apis/dataset_metadata/dataset_related_identifier.py index b4b76888..3e2ce088 100644 --- a/apis/dataset_metadata/dataset_related_identifier.py +++ b/apis/dataset_metadata/dataset_related_identifier.py @@ -88,13 +88,17 @@ def post(self, study_id: int, dataset_id: int): list_of_elements = [] for i in data: if "id" in i and i["id"]: - dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get(i["id"]) + dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get( + i["id"] + ) if not dataset_related_identifier_: return f"{i['id']} Id is not found", 404 dataset_related_identifier_.update(i) list_of_elements.append(dataset_related_identifier_.to_dict()) elif "id" not in i or not i["id"]: - dataset_related_identifier_ = 
model.DatasetRelatedIdentifier.from_data(data_obj, i) + dataset_related_identifier_ = model.DatasetRelatedIdentifier.from_data( + data_obj, i + ) model.db.session.add(dataset_related_identifier_) list_of_elements.append(dataset_related_identifier_.to_dict()) model.db.session.commit() @@ -111,16 +115,18 @@ class DatasetRelatedIdentifierUpdate(Resource): @api.response(204, "Success") @api.response(400, "Validation Error") def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - related_identifier_id: int, + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + related_identifier_id: int, ): """Delete dataset related identifier""" study_obj = model.Study.query.get(study_id) if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 - dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get(related_identifier_id) + dataset_related_identifier_ = model.DatasetRelatedIdentifier.query.get( + related_identifier_id + ) model.db.session.delete(dataset_related_identifier_) model.db.session.commit() diff --git a/app.py b/app.py index 94c04970..899c87b8 100644 --- a/app.py +++ b/app.py @@ -6,12 +6,15 @@ import os from datetime import timezone +import click import jwt from flask import Flask, g, request from flask_bcrypt import Bcrypt from flask_cors import CORS from growthbook import GrowthBook from sqlalchemy import MetaData, inspect +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import DropTable from waitress import serve import caching @@ -26,6 +29,12 @@ bcrypt = Bcrypt() +# Add Cascade to Table Drop Call in destroy-schema CLI command +@compiles(DropTable, "postgresql") +def _compile_drop_table(element, compiler): + return f"{compiler.visit_drop_table(element)} CASCADE" + + def create_app(config_module=None, loglevel="INFO"): """Initialize the core application.""" # create and configure the app @@ -141,21 +150,27 
@@ def cycle_schema(): model.db.drop_all() model.db.create_all() - @app.cli.command("inspect-schemas") - def inspect_schemas(): - """Print database schemas, tables, and columns to CLI.""" + @app.cli.command("inspect-schema") + @click.argument("schema") + def inspect_schema(schema=None): + """Print database schemas, tables, and columns to CLI. + Optional argument schema. Default all schemas inspected. + """ engine = model.db.session.get_bind() inspector = inspect(engine) - schemas = inspector.get_schema_names() - for schema in schemas: - print("-" * 32) - print(f"Schema: {schema}") - for table_name in inspector.get_table_names(schema=schema): - print(f"\n Table: {table_name}") - for column in inspector.get_columns(table_name, schema=schema): - print(f" Column: {column['name']}") - for k, v in column.items(): - print(f" {k:<16}{str(v):>16}") + schema_names = inspector.get_schema_names() + for schema_name in schema_names: + if schema is None or schema == schema_name: + print("-" * 38) + print(f"SCHEMA: {schema_name}") + print("-" * 38) + for table_name in inspector.get_table_names(schema=schema_name): + print(f" Table: {table_name}") + for column in inspector.get_columns(table_name, schema=schema_name): + print(f" Column: {column['name']}") + for k, v in column.items(): + print(f" {k:<16}{str(v):>16}") + print("\n ", "-" * 36) @app.before_request def on_before_request(): # pylint: disable = inconsistent-return-statements diff --git a/model/study_dashboard.py b/model/study_dashboard.py index c3678654..e4ba0036 100644 --- a/model/study_dashboard.py +++ b/model/study_dashboard.py @@ -24,6 +24,7 @@ class StudyDashboard(db.Model): # type: ignore modules: list[dict[str, (str | bool | int)]] = db.Column( NestedMutableJson, nullable=True ) + public: bool = db.Column(db.Boolean, nullable=True) redcap_pid: int = db.Column(db.BigInteger, nullable=True) reports: list[dict[str, str]] = db.Column(NestedMutableJson, nullable=True) created_at: float = db.Column(db.BigInteger, 
nullable=False) @@ -63,6 +64,7 @@ def to_dict(self) -> Dict: "redcap_id": self.redcap_id, "redcap_pid": self.redcap_pid, "reports": self.reports, + "public": self.public, "created_at": self.created_at, "updated_on": self.updated_on, } @@ -82,6 +84,7 @@ def update(self, data: Dict) -> Any: "redcap_id", "redcap_pid", "reports", + "public", ] for key, val in data.items(): if key in user_updatable_props: diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 375fd863..374272e9 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -49,15 +49,15 @@ "cmtrt_insln", "cmtrt_glcs", "cmtrt_lfst", - "scrcmpdat", + "dricmpdat", ] computed_columns: List = [ "phenotypes", "treatments", - "scrweek", - "scryear", - "scrdate", + "visitweek", + "visityear", + "visitdate", ] # Survey Column Groups @@ -179,8 +179,8 @@ ( "transform_values_by_column", { - "column": "scrcmpdat", - "new_column_name": "scrweek", + "column": "dricmpdat", + "new_column_name": "visitweek", # ISO 8601 string format token for front-end: %V "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, "missing_value": missing_value_generic, @@ -189,8 +189,8 @@ ( "transform_values_by_column", { - "column": "scrcmpdat", - "new_column_name": "scryear", + "column": "dricmpdat", + "new_column_name": "visityear", # ISO 8601 string format token for front-end: %Y "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, "missing_value": missing_value_generic, @@ -199,8 +199,8 @@ ( "transform_values_by_column", { - "column": "scrcmpdat", - "new_column_name": "scrdate", + "column": "dricmpdat", + "new_column_name": "visitdate", # ISO 8601 string format token for front-end: %Y "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), "missing_value": missing_value_generic, @@ -1210,7 +1210,7 @@ "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "race", "scrdate"], + "groups": ["siteid", 
"race", "visitdate"], "value": "record_id", "func": "count", } @@ -1230,7 +1230,7 @@ }, "x": { "name": "Week of the Year", - "field": "scrdate", + "field": "visitdate", "missing_value": missing_value_generic, "astype": str, }, @@ -1258,7 +1258,7 @@ "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "phenotypes", "scrdate"], + "groups": ["siteid", "phenotypes", "visitdate"], "value": "record_id", "func": "count", } @@ -1278,7 +1278,7 @@ }, "x": { "name": "Week of the Year", - "field": "scrdate", + "field": "visitdate", "missing_value": missing_value_generic, "astype": str, }, diff --git a/sql/init.sql b/sql/init.sql index 6ecb5e4b..824179be 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -806,6 +806,7 @@ CREATE TABLE IF NOT EXISTS "study_dashboard" ( "modules" UNKNOWN NOT NULL, "reports" UNKNOWN NOT NULL, "study_id" CHAR(36) NOT NULL, + "public" BOOLEAN NOT NULL, "redcap_id" CHAR(36) NOT NULL, "redcap_pid" BIGINT NOT NULL "created_at" BIGINT NOT NULL, @@ -816,8 +817,9 @@ CREATE TABLE IF NOT EXISTS "study_dashboard" ( ); -- Dumping data for table public.study_dashboard: 1 rows /*!40000 ALTER TABLE "study_dashboard" DISABLE KEYS */; -INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "redcap_pid", "reports", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', 12345, '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "redcap_pid", "reports", "public", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'public-dashboard-name', '{}', 12345, '{}', TRUE, '2023-08-13 16:23:48', '2023-08-14 16:23:49') + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000007', 
'00000000-0000-0000-0000-000000000009', 'private-dashboard-name', '{}', 12345, '{}', FALSE, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_dashboard" ENABLE KEYS */; -- Dumping structure for table public.study_reference diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index 5edb972d..bda50aa5 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -927,6 +927,7 @@ CREATE TABLE IF NOT EXISTS "study_dashboard" ( "modules" UNKNOWN NOT NULL, "reports" UNKNOWN NOT NULL, "study_id" CHAR(36) NOT NULL, + "public" BOOLEAN NOT NULL, "redcap_id" CHAR(36) NOT NULL, "redcap_pid" BIGINT NOT NULL "created_at" BIGINT NOT NULL, @@ -937,8 +938,9 @@ CREATE TABLE IF NOT EXISTS "study_dashboard" ( ); -- Dumping data for table public.study_dashboard: 1 rows /*!40000 ALTER TABLE "study_dashboard" DISABLE KEYS */; -INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "redcap_pid", "reports", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000006', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'dashboard-name', '{}', 12345, '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "name", "modules", "redcap_pid", "reports", "public", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000008', 'public-dashboard-name', '{}', 12345, '{}', TRUE, '2023-08-13 16:23:48', '2023-08-14 16:23:49') + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000007', '00000000-0000-0000-0000-000000000009', 'private-dashboard-name', '{}', 12345, '{}', FALSE, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "study_dashboard" ENABLE KEYS */; -- Dumping structure for table public.study_reference diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index 7e008683..4147a2e5 100644 
--- a/sql/specific_tables.sql +++ b/sql/specific_tables.sql @@ -70,12 +70,12 @@ INSERT INTO "study_redcap" ("study_id", "id", "title", "api_pid", "api_url", "ap ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', 'data-stuff', '44444', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA4', 1, '2023-08-13 16:23:48', '2023-08-13 16:23:49'), ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', 'more-stuff', '55555', 'https://redcap.university.org/api', '0000000000000000AAAAAAAAAAAAAAA5', 1, '2023-08-13 16:23:48', '2023-08-13 16:23:49'); -INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "redcap_pid", "reports", "name", "modules", "created_at", "updated_on") VALUES - ('00000000-0000-0000-0000-000000000001', '00000000-0000-0000-0000-000000000006', '10000000-0000-0000-0000-000000000000', 1234, '{}', 'recruitment', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000007', '20000000-0000-0000-0000-000000000000', 2345, '{}', 'surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000008', '30000000-0000-0000-0000-000000000000', 3456, '{}', 'participants', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', '40000000-0000-0000-0000-000000000000', 4567, '{}', 'repeat-surveys', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'), - ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', '50000000-0000-0000-0000-000000000000', 5678, '{}', 'more-stuff', '{}', '2023-08-13 16:23:48', '2023-08-14 16:23:49'); +INSERT INTO "study_dashboard" ("study_id", "redcap_id", "id", "redcap_pid", "reports", "name", "modules", "public", "created_at", "updated_on") VALUES + ('00000000-0000-0000-0000-000000000001', 
'00000000-0000-0000-0000-000000000006', '10000000-0000-0000-0000-000000000000', 1234, '{}', 'recruitment', '{}', TRUE, '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000002', '00000000-0000-0000-0000-000000000007', '20000000-0000-0000-0000-000000000000', 2345, '{}', 'surveys', '{}', TRUE, '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000003', '00000000-0000-0000-0000-000000000008', '30000000-0000-0000-0000-000000000000', 3456, '{}', 'participants', '{}', FALSE, '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000004', '00000000-0000-0000-0000-000000000009', '40000000-0000-0000-0000-000000000000', 4567, '{}', 'repeat-surveys', '{}', FALSE, '2023-08-13 16:23:48', '2023-08-14 16:23:49'), + ('00000000-0000-0000-0000-000000000005', '00000000-0000-0000-0000-000000000000', '50000000-0000-0000-0000-000000000000', 5678, '{}', 'more-stuff', '{}', FALSE, '2023-08-13 16:23:48', '2023-08-14 16:23:49'); /*!40000 ALTER TABLE "dataset" ENABLE KEYS */; diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 5791d104..9ba4c91e 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -3026,7 +3026,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -3056,7 +3056,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -3084,7 +3084,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -3110,7 
+3110,7 @@ def test_post_dataset_related_identifier_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index d15c0f36..3aec77de 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -712,7 +712,7 @@ def test_get_version_dataset_metadata(clients): "related_metadata_scheme": "test", "scheme_uri": "test", "scheme_type": "test", - "resource_type": "test" + "resource_type": "test", } ], ) @@ -791,8 +791,12 @@ def test_get_version_dataset_metadata(clients): assert response_data["identifiers"][0]["identifier"] == "identifier test" assert response_data["identifiers"][0]["type"] == "ARK" - assert response_data["related_identifier"][0]["identifier"] == "editor test identifier" - assert response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert ( + response_data["related_identifier"][0]["identifier"] == "editor test identifier" + ) + assert ( + response_data["related_identifier"][0]["relation_type"] == "test relation type" + ) assert response_data["related_identifier"][0]["resource_type"] == "test" assert admin_response_data["contributors"][0]["given_name"] == "Given Name here" @@ -828,8 +832,14 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["identifiers"][0]["identifier"] == "identifier test" assert admin_response_data["identifiers"][0]["type"] == "ARK" - assert admin_response_data["related_identifier"][0]["identifier"] == "editor test identifier" - assert admin_response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert ( + admin_response_data["related_identifier"][0]["identifier"] + == "editor test identifier" + ) + assert ( + admin_response_data["related_identifier"][0]["relation_type"] + == "test relation type" + ) assert 
admin_response_data["related_identifier"][0]["resource_type"] == "test" assert editor_response_data["contributors"][0]["family_name"] == "Family Name here" @@ -865,8 +875,14 @@ def test_get_version_dataset_metadata(clients): assert editor_response_data["identifiers"][0]["identifier"] == "identifier test" assert editor_response_data["identifiers"][0]["type"] == "ARK" - assert editor_response_data["related_identifier"][0]["identifier"] == "editor test identifier" - assert editor_response_data["related_identifier"][0]["relation_type"] == "test relation type" + assert ( + editor_response_data["related_identifier"][0]["identifier"] + == "editor test identifier" + ) + assert ( + editor_response_data["related_identifier"][0]["relation_type"] + == "test relation type" + ) assert editor_response_data["related_identifier"][0]["resource_type"] == "test" From 06ca5bb24958de89cd2e6b49ef356317ac24a9ca Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 19 Mar 2024 13:17:00 -0700 Subject: [PATCH 453/505] style: format --- apis/study_metadata/study_central_contact.py | 1 - apis/study_metadata/study_oversight.py | 5 ++++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/apis/study_metadata/study_central_contact.py b/apis/study_metadata/study_central_contact.py index e7f5da34..b0c835b0 100644 --- a/apis/study_metadata/study_central_contact.py +++ b/apis/study_metadata/study_central_contact.py @@ -90,7 +90,6 @@ def validate_is_valid_email(instance): "affiliation": {"type": "string", "minLength": 1}, "affiliation_identifier": { "type": "string", - }, "affiliation_identifier_scheme": { "type": "string", diff --git a/apis/study_metadata/study_oversight.py b/apis/study_metadata/study_oversight.py index 71eded68..179f4384 100644 --- a/apis/study_metadata/study_oversight.py +++ b/apis/study_metadata/study_oversight.py @@ -48,7 +48,10 @@ def put(self, study_id: int): "additionalProperties": False, "properties": { "fda_regulated_drug": {"type": ["string", "null"], "minLength": 
1}, - "fda_regulated_device": {"type": ["string", "null"], "minLength": 1}, + "fda_regulated_device": { + "type": ["string", "null"], + "minLength": 1, + }, "has_dmc": {"type": ["string", "null"]}, "human_subject_review_status": {"type": "string"}, }, From ac7228d796f1d20a8225ab5966b4bd9dae558b29 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Tue, 19 Mar 2024 13:43:29 -0700 Subject: [PATCH 454/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20=20managing=20org?= =?UTF-8?q?anization=20table=20(#53)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added managing organization table * fix: api for managing org * fix: api for managing org * fix: test for managing org * fix: model classes of managing organization * style: format --- .../dataset_managing_organization.py | 27 +++-- model/__init__.py | 2 + model/dataset.py | 9 +- .../dataset_managing_organization.py | 52 +++++++++ model/dataset_metadata/dataset_other.py | 15 --- .../test_study_dataset_metadata_api.py | 109 ++++++++---------- tests/functional/test_study_version_api.py | 15 ++- 7 files changed, 135 insertions(+), 94 deletions(-) create mode 100644 model/dataset_metadata/dataset_managing_organization.py diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py index 9319426e..2ca590ae 100644 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ b/apis/dataset_metadata/dataset_managing_organization.py @@ -11,8 +11,10 @@ dataset_managing_organization = api.model( "DatasetManagingOrganization", { - "managing_organization_name": fields.String(required=True), - "managing_organization_ror_id": fields.String(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_scheme": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), }, ) @@ -28,7 +30,7 @@ class 
DatasetManagingOrganization(Resource): def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument """Get dataset publisher metadata""" dataset_ = model.Dataset.query.get(dataset_id) - managing_organization_ = dataset_.dataset_other + managing_organization_ = dataset_.dataset_managing_organization return managing_organization_.to_dict(), 200 @api.doc("update organization") @@ -46,12 +48,17 @@ def put(self, study_id: int, dataset_id: int): "type": "object", "additionalProperties": False, "properties": { - "managing_organization_name": {"type": "string", "minLength": 1}, - "managing_organization_ror_id": { - "type": "string", - }, + "name": {"type": "string", "minLength": 1}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, }, - "required": ["managing_organization_name", "managing_organization_ror_id"], + "required": [ + "name", + "identifier", + "identifier_scheme", + "identifier_scheme_uri", + ], } try: validate(instance=request.json, schema=schema) @@ -60,7 +67,7 @@ def put(self, study_id: int, dataset_id: int): data = request.json dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_other.update(data) + dataset_.dataset_managing_organization.update(data) model.db.session.commit() - return dataset_.dataset_other.to_dict(), 200 + return dataset_.dataset_managing_organization.to_dict(), 200 diff --git a/model/__init__.py b/model/__init__.py index 54b3be62..33959375 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -10,6 +10,7 @@ from .dataset_metadata.dataset_description import DatasetDescription from .dataset_metadata.dataset_funder import DatasetFunder from .dataset_metadata.dataset_healthsheet import DatasetHealthsheet +from .dataset_metadata.dataset_managing_organization import DatasetManagingOrganization from .dataset_metadata.dataset_other import DatasetOther from .dataset_metadata.dataset_rights import DatasetRights from 
.dataset_metadata.dataset_subject import DatasetSubject @@ -60,6 +61,7 @@ "DatasetContributor", "StudyContributor", "DatasetOther", + "DatasetManagingOrganization", "DatasetAccess", "DatasetConsent", "DatasetHealthsheet", diff --git a/model/dataset.py b/model/dataset.py index dfb5c0b9..320f2929 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -21,6 +21,7 @@ def __init__(self, study): self.dataset_consent = model.DatasetConsent(self) self.dataset_healthsheet = model.DatasetHealthsheet(self) self.dataset_other = model.DatasetOther(self) + self.dataset_managing_organization = model.DatasetManagingOrganization(self) self.dataset_title.append(model.DatasetTitle(self)) self.dataset_description.append(model.DatasetDescription(self)) @@ -94,6 +95,12 @@ def __init__(self, study): dataset_other = db.relationship( "DatasetOther", back_populates="dataset", uselist=False, cascade="all, delete" ) + dataset_managing_organization = db.relationship( + "DatasetManagingOrganization", + back_populates="dataset", + uselist=False, + cascade="all, delete", + ) dataset_related_identifier = db.relationship( "DatasetRelatedIdentifier", back_populates="dataset", cascade="all, delete" ) @@ -131,7 +138,7 @@ def to_dict_dataset_metadata(self): if not i.creator ], "about": self.dataset_other.to_dict_metadata(), - "managing_organization": self.dataset_other.to_dict_managing_organization(), # type: ignore + "managing_organization": self.dataset_managing_organization.to_dict_metadata(), # type: ignore "access": self.dataset_access.to_dict_metadata(), "consent": self.dataset_consent.to_dict_metadata(), "dates": [i.to_dict_metadata() for i in self.dataset_date], # type: ignore diff --git a/model/dataset_metadata/dataset_managing_organization.py b/model/dataset_metadata/dataset_managing_organization.py new file mode 100644 index 00000000..1869a6c3 --- /dev/null +++ b/model/dataset_metadata/dataset_managing_organization.py @@ -0,0 +1,52 @@ +from ..db import db + + +class 
DatasetManagingOrganization(db.Model): # type: ignore + def __init__(self, dataset): + self.dataset = dataset + self.name = "" + self.identifier = "" + self.identifier_scheme = "" + self.identifier_scheme_uri = "" + + __tablename__ = "dataset_managing_organization" + + name = db.Column(db.String, nullable=False) + identifier = db.Column(db.String, nullable=False) + identifier_scheme = db.Column(db.String, nullable=False) + identifier_scheme_uri = db.Column(db.String, nullable=False) + + dataset_id = db.Column( + db.CHAR(36), db.ForeignKey("dataset.id"), primary_key=True, nullable=False + ) + dataset = db.relationship("Dataset", back_populates="dataset_managing_organization") + + def to_dict(self): + return { + "name": self.name, + "identifier": self.identifier, + "identifier_scheme": self.identifier_scheme, + "identifier_scheme_uri": self.identifier_scheme_uri, + } + + def to_dict_metadata(self): + return { + "name": self.name, + "identifier": self.identifier, + } + + @staticmethod + def from_data(dataset, data: dict): + dataset_other = DatasetManagingOrganization(dataset) + dataset_other.update(data) + return dataset_other + + def update(self, data: dict): + if "name" in data: + self.name = data["name"] + if "identifier" in data: + self.identifier = data["identifier"] + if "identifier_scheme" in data: + self.identifier_scheme = data["identifier_scheme"] + if "identifier_scheme_uri" in data: + self.identifier_scheme_uri = data["identifier_scheme_uri"] diff --git a/model/dataset_metadata/dataset_other.py b/model/dataset_metadata/dataset_other.py index 411af35e..f6887f0d 100644 --- a/model/dataset_metadata/dataset_other.py +++ b/model/dataset_metadata/dataset_other.py @@ -9,8 +9,6 @@ def __init__(self, dataset): self.dataset = dataset self.resource_type = "" self.language = None - self.managing_organization_name = "" - self.managing_organization_ror_id = "" self.size = "" self.format = "" self.standards_followed = "" @@ -20,8 +18,6 @@ def __init__(self, dataset): 
resource_type = db.Column(db.String, nullable=False) language = db.Column(db.String, nullable=True) - managing_organization_name = db.Column(db.String, nullable=False) - managing_organization_ror_id = db.Column(db.String, nullable=False) size = db.Column(ARRAY(String), nullable=False) format = db.Column(ARRAY(String), nullable=False) standards_followed = db.Column(db.String, nullable=False) @@ -35,8 +31,6 @@ def __init__(self, dataset): def to_dict(self): return { "language": self.language, - "managing_organization_name": self.managing_organization_name, - "managing_organization_ror_id": self.managing_organization_ror_id, "standards_followed": self.standards_followed, "acknowledgement": self.acknowledgement, "size": self.size, @@ -51,11 +45,6 @@ def to_dict_metadata(self): "resource_type": self.resource_type, } - def to_dict_managing_organization(self): - return { - "managing_organization_name": self.managing_organization_name, - } - @staticmethod def from_data(dataset, data: dict): dataset_other = DatasetOther(dataset) @@ -65,10 +54,6 @@ def from_data(dataset, data: dict): def update(self, data: dict): if "language" in data: self.language = data["language"] - if "managing_organization_name" in data: - self.managing_organization_name = data["managing_organization_name"] - if "managing_organization_ror_id" in data: - self.managing_organization_ror_id = data["managing_organization_ror_id"] if "size" in data: self.size = data["size"] if "format" in data: diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_study_dataset_metadata_api.py index 7090108f..aad43499 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_study_dataset_metadata_api.py @@ -2866,64 +2866,64 @@ def test_put_dataset_managing_organization_metadata(clients): response = _logged_in_client.put( f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "managing_organization_name": "Managing Organization 
Name", - "managing_organization_ror_id": "Managing Organization ROR ID", + "name": "Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", }, ) assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["managing_organization_name"] == "Managing Organization Name" - assert ( - response_data["managing_organization_ror_id"] == "Managing Organization ROR ID" - ) + assert response_data["name"] == "Managing Organization Name" + assert response_data["identifier"] == "identifier" + assert response_data["identifier_scheme"] == "identifier scheme" + assert response_data["identifier_scheme_uri"] == "identifier scheme_uri" admin_response = _admin_client.put( f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "managing_organization_name": "Managing Admin Organization Name", - "managing_organization_ror_id": "Managing Organization ROR ID", + "name": "admin Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", }, ) assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["managing_organization_name"] - == "Managing Admin Organization Name" - ) - assert ( - admin_response_data["managing_organization_ror_id"] - == "Managing Organization ROR ID" - ) + assert admin_response_data["name"] == "admin Managing Organization Name" + assert admin_response_data["identifier"] == "identifier" + assert admin_response_data["identifier_scheme"] == "identifier scheme" + assert admin_response_data["identifier_scheme_uri"] == "identifier scheme_uri" editor_response = _editor_client.put( f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "managing_organization_name": "Managing Editor Organization Name", - 
"managing_organization_ror_id": "Managing Organization ROR ID", + "name": "editor Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", }, ) assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["managing_organization_name"] - == "Managing Editor Organization Name" - ) - assert ( - editor_response_data["managing_organization_ror_id"] - == "Managing Organization ROR ID" - ) + assert editor_response_data["name"] == "editor Managing Organization Name" + assert editor_response_data["identifier"] == "identifier" + assert editor_response_data["identifier_scheme"] == "identifier scheme" + assert editor_response_data["identifier_scheme_uri"] == "identifier scheme_uri" viewer_response = _viewer_client.put( f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", json={ - "managing_organization_name": "Managing Viewer Organization Name", - "managing_organization_ror_id": "Managing Organization ROR ID", + "name": "editor Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", }, ) @@ -2967,40 +2967,25 @@ def test_get_dataset_managing_organization_metadata(clients): # Editor was the last to update the metadata successfully so # the response should reflect that - assert ( - response_data["managing_organization_name"] - == "Managing Editor Organization Name" - ) - assert ( - response_data["managing_organization_ror_id"] == "Managing Organization ROR ID" - ) - - assert ( - admin_response_data["managing_organization_name"] - == "Managing Editor Organization Name" - ) - assert ( - admin_response_data["managing_organization_ror_id"] - == "Managing Organization ROR ID" - ) - - assert ( - editor_response_data["managing_organization_name"] - == "Managing Editor Organization Name" - ) - assert ( - 
editor_response_data["managing_organization_ror_id"] - == "Managing Organization ROR ID" - ) - - assert ( - viewer_response_data["managing_organization_name"] - == "Managing Editor Organization Name" - ) - assert ( - viewer_response_data["managing_organization_ror_id"] - == "Managing Organization ROR ID" - ) + assert response_data["name"] == "editor Managing Organization Name" + assert response_data["identifier"] == "identifier" + assert response_data["identifier_scheme"] == "identifier scheme" + assert response_data["identifier_scheme_uri"] == "identifier scheme_uri" + + assert admin_response_data["name"] == "editor Managing Organization Name" + assert admin_response_data["identifier"] == "identifier" + assert admin_response_data["identifier_scheme"] == "identifier scheme" + assert admin_response_data["identifier_scheme_uri"] == "identifier scheme_uri" + + assert editor_response_data["name"] == "editor Managing Organization Name" + assert editor_response_data["identifier"] == "identifier" + assert editor_response_data["identifier_scheme"] == "identifier scheme" + assert editor_response_data["identifier_scheme_uri"] == "identifier scheme_uri" + + assert viewer_response_data["name"] == "editor Managing Organization Name" + assert viewer_response_data["identifier"] == "identifier" + assert viewer_response_data["identifier_scheme"] == "identifier scheme" + assert viewer_response_data["identifier_scheme_uri"] == "identifier scheme_uri" # ------------------- RELATED IDENTIFIER METADATA ------------------- # diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_study_version_api.py index c016949c..0b326d74 100644 --- a/tests/functional/test_study_version_api.py +++ b/tests/functional/test_study_version_api.py @@ -855,9 +855,10 @@ def test_get_version_dataset_metadata(clients): assert response_data["de_identification"]["type"] == "Level" assert ( - response_data["managing_organization"]["managing_organization_name"] - == "Managing Editor 
Organization Name" + response_data["managing_organization"]["name"] + == "editor Managing Organization Name" ) + assert response_data["managing_organization"]["identifier"] == "identifier" assert response_data["identifiers"][0]["identifier"] == "identifier test" assert response_data["identifiers"][0]["type"] == "ARK" @@ -895,9 +896,10 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["de_identification"]["direct"] is True assert admin_response_data["de_identification"]["type"] == "Level" assert ( - admin_response_data["managing_organization"]["managing_organization_name"] - == "Managing Editor Organization Name" + admin_response_data["managing_organization"]["name"] + == "editor Managing Organization Name" ) + assert admin_response_data["managing_organization"]["identifier"] == "identifier" assert admin_response_data["identifiers"][0]["identifier"] == "identifier test" assert admin_response_data["identifiers"][0]["type"] == "ARK" @@ -938,9 +940,10 @@ def test_get_version_dataset_metadata(clients): assert editor_response_data["de_identification"]["direct"] is True assert editor_response_data["de_identification"]["type"] == "Level" assert ( - editor_response_data["managing_organization"]["managing_organization_name"] - == "Managing Editor Organization Name" + editor_response_data["managing_organization"]["name"] + == "editor Managing Organization Name" ) + assert editor_response_data["managing_organization"]["identifier"] == "identifier" assert editor_response_data["identifiers"][0]["identifier"] == "identifier test" assert editor_response_data["identifiers"][0]["type"] == "ARK" From 7f97fea2c09021a58fe415a8cb74568f964ce5ac Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 2 Apr 2024 11:09:02 -0700 Subject: [PATCH 455/505] chore: rebuild database From ecaf661681b8165829721ff5748b7b0a17d0210a Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 2 Apr 2024 12:12:47 -0700 Subject: [PATCH 456/505] 
=?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20chore:=20update?= =?UTF-8?q?=20db?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env.example | 7 ++++--- config.py | 2 +- dev/drop_tables.sql | 1 + 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.env.example b/.env.example index dcecd1b8..f3dd1d03 100644 --- a/.env.example +++ b/.env.example @@ -1,8 +1,9 @@ -FAIRHUB_DATABASE_URL="postgresql://admin:root@localhost:5432/fairhub_local" +FAIRHUB_DATABASE_URL="postgresql://admin:root@localhost:5432/fairhub_local?sslmode=disable" + FAIRHUB_SECRET="AddAny32+CharacterCountWordHereAsYourSecret" -FAIRHUB_AZURE_READ_SAS_TOKEN= -FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME= + FAIRHUB_GROWTHBOOK_CLIENT_KEY= + FAIRHUB_CACHE_DEFAULT_TIMEOUT=86400 FAIRHUB_CACHE_KEY_PREFIX=fairhub-io# FAIRHUB_CACHE_HOST=localhost diff --git a/config.py b/config.py index 7666f32d..7f663e75 100644 --- a/config.py +++ b/config.py @@ -17,8 +17,8 @@ def get_env(key): """Return environment variable from .env or native environment.""" return config.get(key) if LOCAL_ENV_FILE else environ.get(key) - FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") + FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") diff --git a/dev/drop_tables.sql b/dev/drop_tables.sql index 1e9baea9..931100fe 100644 --- a/dev/drop_tables.sql +++ b/dev/drop_tables.sql @@ -41,6 +41,7 @@ DROP TABLE IF EXISTS dataset_de_ident_level CASCADE; DROP TABLE IF EXISTS dataset_description CASCADE; DROP TABLE IF EXISTS dataset_funder CASCADE; DROP TABLE IF EXISTS dataset_healthsheet CASCADE; +DROP TABLE IF EXISTS dataset_managing_organization CASCADE; DROP TABLE IF EXISTS dataset_other CASCADE; DROP TABLE IF EXISTS dataset_record_keys CASCADE; DROP TABLE IF EXISTS dataset_rights CASCADE; From ee0eefd0a9b5af06cdf171dcf56ba14978053d15 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 5 Apr 2024 17:42:49 -0700 Subject: [PATCH 457/505] wip: 
finalizing redcap release ETL --- .gitignore | 1 + apis/dashboard.py | 178 +- apis/redcap.py | 117 +- app.py | 9 + config.py | 6 +- modules/etl/config/__init__.py | 15 +- modules/etl/config/aireadi_config.py | 2163 ++++++++++++++--- modules/etl/transforms/__init__.py | 3 +- ..._transform.py => redcap_live_transform.py} | 110 +- .../transforms/redcap_release_transform.py | 935 +++++++ poetry.lock | 70 +- pyproject.toml | 1 + 12 files changed, 3097 insertions(+), 511 deletions(-) rename modules/etl/transforms/{redcap_transform.py => redcap_live_transform.py} (96%) create mode 100644 modules/etl/transforms/redcap_release_transform.py diff --git a/.gitignore b/.gitignore index 9c9e87e9..da55802a 100644 --- a/.gitignore +++ b/.gitignore @@ -41,3 +41,4 @@ coverage # Database postgres-data/* redis-data/* +storage/* diff --git a/apis/dashboard.py b/apis/dashboard.py index 9aeb4efb..b588e4a3 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -8,8 +8,12 @@ import caching import model -from modules.etl import ModuleTransform, RedcapTransform -from modules.etl.config import moduleTransformConfigs, redcapTransformConfig +from modules.etl import ModuleTransform, RedcapLiveTransform, RedcapReleaseTransform +from modules.etl.config import ( + moduleTransformConfigs, + redcapLiveTransformConfig, + redcapReleaseTransformConfig, +) from .authentication import is_granted @@ -66,6 +70,11 @@ "selected": fields.Boolean( required=True, readonly=True, description="Dashboard module is selected" ), + "public": fields.Boolean( + required=True, + readonly=True, + description="Dashboard module is publicly available", + ), "visualizations": fields.List( fields.Nested(visualization_model), required=True, @@ -87,6 +96,16 @@ "report_name": fields.String( required=True, readonly=True, description="REDCap report name" ), + "report_has_modules": fields.Boolean( + required=True, + readonly=True, + description="REDCap report is associated with one or more dashboard modules", + ), + "public": 
fields.Boolean( + required=True, + readonly=True, + description="Dashboard module is publicly available", + ), }, ) @@ -147,6 +166,11 @@ "selected": fields.Boolean( required=True, readonly=True, description="Dashboard module is selected" ), + "public": fields.Boolean( + required=True, + readonly=True, + description="Dashboard module is publicly available", + ), }, ) redcap_project_dashboard_connector_model = api.model( @@ -242,6 +266,8 @@ def post(self, study_id: str): "report_id": {"type": "string", "minLength": 0}, "report_key": {"type": "string", "minLength": 1}, "report_name": {"type": "string", "minLength": 1}, + "report_has_modules": {"type": "boolean"}, + "public": {"type": "boolean"}, }, } ] @@ -259,6 +285,7 @@ def post(self, study_id: str): "id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "selected": {"type": "boolean"}, + "public": {"type": "boolean"}, "report_key": {"type": "string", "minLength": 1}, }, } @@ -391,6 +418,8 @@ def get(self, study_id: str, dashboard_id: str): if cached_redcap_project_dashboard is not None: return cached_redcap_project_dashboard, 201 + transformConfig = redcapLiveTransformConfig + redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) @@ -406,23 +435,43 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() # Set report_ids for ETL + report_keys = [] for report in redcap_project_dashboard["reports"]: - for i, report_config in enumerate(redcapTransformConfig["reports"]): + for i, report_config in enumerate(transformConfig["reports"]): if ( - report["report_key"] == report_config["key"] - and len(report["report_id"]) > 0 + len(report["report_id"]) > 0 + and report["report_key"] == report_config["key"] ): - redcapTransformConfig["reports"][i]["kwdargs"][ + report_keys.append(report["report_key"]) + transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ "report_id" - 
] = report["report_id"] + ] + + # Remove Unused Reports + transformConfig["reports"] = [ + report + for report in redcapLiveTransformConfig["reports"] + if report["key"] in report_keys + ] + + # Set Post Transform Merge + index_columns, post_transform_merges = transformConfig["post_transform_merge"] + transformConfig["post_transform_merge"] = ( + index_columns, + [ + (report_key, transform_kwdargs) + for report_key, transform_kwdargs in post_transform_merges + if report_key in report_keys + ], + ) # Structure REDCap ETL Config redcap_etl_config = { "redcap_api_url": redcap_project_view["api_url"], "redcap_api_key": redcap_project_view["api_key"], - } | redcapTransformConfig + } | transformConfig - redcapTransform = RedcapTransform(redcap_etl_config) + redcapTransform = RedcapLiveTransform(redcap_etl_config) # Execute Dashboard Module Transforms for dashboard_module in redcap_project_dashboard["modules"]: @@ -450,7 +499,7 @@ def get(self, study_id: str, dashboard_id: str): f"$study_id#{study_id}$dashboard_id#{dashboard_id}", redcap_project_dashboard, ) - + print("Live Transform") return redcap_project_dashboard, 201 @api.doc("Update a study dashboard") @@ -487,6 +536,8 @@ def put(self, study_id: str, dashboard_id: str): "report_id": {"type": "string", "minLength": 0}, "report_key": {"type": "string", "minLength": 1}, "report_name": {"type": "string", "minLength": 1}, + "report_has_modules": {"type": "boolean"}, + "public": {"type": "boolean"}, }, } ] @@ -505,6 +556,7 @@ def put(self, study_id: str, dashboard_id: str): "id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "selected": {"type": "boolean"}, + "public": {"type": "boolean"}, "report_key": {"type": "string", "minLength": 1}, }, } @@ -593,3 +645,109 @@ def delete(self, study_id: str, dashboard_id: str): model.db.session.commit() return 204 + + +@api.route("/study//dashboard//release") +class RedcapProjectDashboardRelease(Resource): + @api.doc("Get a study dashboard") + 
@api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(redcap_project_dashboard_model) + def get(self, study_id: str, dashboard_id: str): + """Get REDCap project dashboard""" + model.db.session.flush() + study = model.db.session.query(model.Study).get(study_id) + if not is_granted("view", study): + return "Access denied, you can not view this dashboard", 403 + + # Retrieve Dashboard Redis Cache + cached_redcap_project_dashboard = caching.cache.get( + f"$study_id#{study_id}$dashboard_id#{dashboard_id}#release" + ) + + if cached_redcap_project_dashboard is not None: + return cached_redcap_project_dashboard, 201 + + transformConfig = redcapReleaseTransformConfig + + redcap_project_dashboard_query: Any = model.db.session.query( + model.StudyDashboard + ).get(dashboard_id) + redcap_project_dashboard: Dict[ + str, Any + ] = redcap_project_dashboard_query.to_dict() + + # Get REDCap Project + redcap_id = redcap_project_dashboard["redcap_id"] + redcap_project_view_query: Any = model.db.session.query(model.StudyRedcap).get( + redcap_id + ) + redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() + + # Set report_ids for ETL + report_keys = [] + for report in redcap_project_dashboard["reports"]: + for i, report_config in enumerate(transformConfig["reports"]): + if ( + len(report["report_id"]) > 0 + and report["report_key"] == report_config["key"] + ): + report_keys.append(report["report_key"]) + transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ + "report_id" + ] + + # Remove Unused Reports + transformConfig["reports"] = [ + report + for report in redcapLiveTransformConfig["reports"] + if report["key"] in report_keys + ] + print(transformConfig["reports"]) + # Set Post Transform Merge + index_columns, post_transform_merges = transformConfig["post_transform_merge"] + transformConfig["post_transform_merge"] = ( + index_columns, + [ + (report_key, transform_kwdargs) + for report_key, transform_kwdargs in 
post_transform_merges + if report_key in report_keys + ], + ) + + # Structure REDCap ETL Config + redcap_etl_config = { + "redcap_api_url": redcap_project_view["api_url"], + "redcap_api_key": redcap_project_view["api_key"], + } | transformConfig + + redcapTransform = RedcapReleaseTransform(redcap_etl_config) + + # Execute Dashboard Module Transforms + for dashboard_module in redcap_project_dashboard["modules"]: + if dashboard_module["selected"]: + mergedTransform = redcapTransform.merged + transform, module_etl_config = moduleTransformConfigs[ + dashboard_module["id"] + ] + moduleTransform = ModuleTransform(module_etl_config) + transformed = getattr(moduleTransform, transform)( + mergedTransform + ).transformed + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": transformed, + } + else: + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": [], + } + + # Create Dashboard Redis Cache + caching.cache.set( + f"$study_id#{study_id}$dashboard_id#{dashboard_id}#release", + redcap_project_dashboard, + ) + print("Release Transform") + return redcap_project_dashboard, 201 diff --git a/apis/redcap.py b/apis/redcap.py index 6e669482..7c24d120 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -12,28 +12,29 @@ api = Namespace("Redcap", description="REDCap operations", path="/") -redcap_project_view_model = api.model( + +redcap_project_api_model = api.model( "RedcapProjectAPI", { "study_id": fields.String(required=True, description="Study ID"), "id": fields.String(required=True, description="REDCap project ID"), "title": fields.String(required=True, description="REDCap project title"), "api_pid": fields.String(required=True, description="REDCap project PID"), + "api_key": fields.String(required=True, description="REDCap project API key"), "api_url": fields.String(required=True, description="REDCap project API url"), "api_active": fields.Boolean( required=True, description="REDCap project is active" ), }, ) - 
-redcap_api_model = api.model( - "RedcapProjectAPI", +# Omit API Key from View +redcap_project_api_view_model = api.model( + "RedcapProjectAPIView", { "study_id": fields.String(required=True, description="Study ID"), "id": fields.String(required=True, description="REDCap project ID"), "title": fields.String(required=True, description="REDCap project title"), "api_pid": fields.String(required=True, description="REDCap project PID"), - "api_key": fields.String(required=True, description="REDCap project API key"), "api_url": fields.String(required=True, description="REDCap project API url"), "api_active": fields.Boolean( required=True, description="REDCap project is active" @@ -43,11 +44,11 @@ @api.route("/study//redcap") -class RedcapProjectAPILink(Resource): +class RedcapProjectAPIViews(Resource): @api.doc("Get all REDCap project API links") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_view_model, as_list=True) + @api.marshal_with(redcap_project_api_view_model, as_list=True) def get(self, study_id: str): """Get all REDCap project API links""" study = model.Study.query.get(study_id) @@ -66,7 +67,7 @@ def get(self, study_id: str): @api.doc("Create a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_api_model) + @api.marshal_with(redcap_project_api_model) def post(self, study_id: str): """Create REDCap project API link""" study = model.Study.query.get(study_id) @@ -136,87 +137,13 @@ def post(self, study_id: str): return add_redcap_api, 201 -# @api.route("/study//redcap/add") -# class AddRedcapProjectAPI(Resource): -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_view_model) -# def post(self, study_id: int): -# """Create REDCap project API link""" -# study = model.Study.query.get(study_id) -# if not is_granted("add_redcap", study): -# return "Access denied, you can not create a 
redcap project", 403 -# # Schema validation -# data: Union[Any, dict] = request.json -# schema = { -# "type": "object", -# "additionalProperties": False, -# "required": [ -# "title", -# "api_pid", -# "api_url", -# "api_key", -# "api_active", -# ], -# "properties": { -# "title": {"type": "string", "minLength": 1}, -# "api_pid": {"type": "string", "minLength": 5}, -# "api_url": {"type": "string", "minLength": 1}, -# "api_key": {"type": "string", "minLength": 32}, -# "api_active": {"type": "boolean"}, -# }, -# } - -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 - -# if len(data["title"]) < 1: -# return ( -# f"""redcap title is required for redcap access: -# {data['title']}""", -# 400, -# ) -# if len(data["api_pid"]) < 1: -# return ( -# f"""redcap api_pid is required for redcap access: -# {data['api_pid']}""", -# 400, -# ) -# if len(data["api_url"]) < 1: -# return ( -# f"""redcap api_url is required for redcap access: -# {data['api_url']}""", -# 400, -# ) -# if len(data["api_key"]) < 1: -# return ( -# f"""redcap api_key is required for redcap access: -# {data['api_key']}""", -# 400, -# ) -# if not isinstance(data["api_active"], bool): -# return ( -# f"""redcap api_active is required for redcap access: -# {data['api_active']}""", -# 400, -# ) - -# add_redcap_api = model.StudyRedcap.from_data(study, data) -# model.db.session.add(add_redcap_api) -# model.db.session.commit() -# add_redcap_api = add_redcap_api.to_dict() -# return add_redcap_api, 201 - - @api.route("/study//redcap/") -class RedcapProjectAPI(Resource): +class RedcapProjectAPIView(Resource): # Get a REDCap API Link @api.doc("Get a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_view_model) + @api.marshal_with(redcap_project_api_view_model) def get(self, study_id: str, redcap_id: str): """Get REDCap project API link""" study = model.db.session.query(model.Study).get(study_id) @@ 
-232,7 +159,7 @@ def get(self, study_id: str, redcap_id: str): @api.doc("Update a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_view_model) + @api.marshal_with(redcap_project_api_view_model) def put(self, study_id: str, redcap_id: str): """Update REDCap project API link""" study = model.Study.query.get(study_id) @@ -244,12 +171,14 @@ def put(self, study_id: str, redcap_id: str): "type": "object", "additionalProperties": False, "required": [ + "id", "title", "api_pid", "api_url", "api_active", ], "properties": { + "id": {"type": "string", "minLength": 36, "maxLength": 36}, "title": {"type": "string", "minLength": 1}, "api_pid": {"type": "string", "minLength": 5}, "api_url": {"type": "string", "minLength": 1}, @@ -261,6 +190,18 @@ def put(self, study_id: str, redcap_id: str): except ValidationError as e: return e.message, 400 + if len(data["id"]) != 36: + return ( + f"""redcap id is required for redcap access and must be a length-36 string UUID: + {data['id']}""", + 400, + ) + if data["id"] != redcap_id: + return ( + f"""redcap id in post body and URL must be consistent: + {data['id']} != {redcap_id}""", + 400, + ) if len(data["title"]) < 1: return ( f"""redcap title is required for redcap access: @@ -296,7 +237,7 @@ def put(self, study_id: str, redcap_id: str): @api.doc("Delete a REDCap project API link") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_view_model) + @api.marshal_with(redcap_project_api_view_model) def delete(self, study_id: str, redcap_id: str): """Delete REDCap project API link""" study = model.Study.query.get(study_id) @@ -312,7 +253,7 @@ def delete(self, study_id: str, redcap_id: str): # @api.doc(parser=project_parser) # @api.response(200, "Success") # @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_view_model) +# @api.marshal_with(redcap_project_api_view_model) # def put(self, 
study_id: int): # """Update REDCap project API link""" # study = model.Study.query.get(study_id) @@ -379,7 +320,7 @@ def delete(self, study_id: str, redcap_id: str): # @api.doc(parser=project_parser) # @api.response(200, "Success") # @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_view_model) +# @api.marshal_with(redcap_project_api_view_model) # def delete(self, study_id: int): # """Delete REDCap project API link""" # study = model.Study.query.get(study_id) diff --git a/app.py b/app.py index 899c87b8..00f93415 100644 --- a/app.py +++ b/app.py @@ -150,6 +150,15 @@ def cycle_schema(): model.db.drop_all() model.db.create_all() + @app.cli.command("list-schemas") + def list_schemas(): + engine = model.db.session.get_bind() + inspector = inspect(engine) + schema_names = inspector.get_schema_names() + print("SCHEMAS") + for schema_name in schema_names: + print(schema_name) + @app.cli.command("inspect-schema") @click.argument("schema") def inspect_schema(schema=None): diff --git a/config.py b/config.py index 7666f32d..8a7fa12a 100644 --- a/config.py +++ b/config.py @@ -12,16 +12,14 @@ # Load environment variables from .env config = dotenv_values(".env") - def get_env(key): """Return environment variable from .env or native environment.""" return config.get(key) if LOCAL_ENV_FILE else environ.get(key) - FAIRHUB_DATABASE_URL = get_env("FAIRHUB_DATABASE_URL") FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") - FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") - +FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") +FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_CONTAINER") FAIRHUB_GROWTHBOOK_CLIENT_KEY = get_env("FAIRHUB_GROWTHBOOK_CLIENT_KEY") diff --git a/modules/etl/config/__init__.py b/modules/etl/config/__init__.py index b3a0c6ed..3563b121 100644 
--- a/modules/etl/config/__init__.py +++ b/modules/etl/config/__init__.py @@ -1,10 +1,23 @@ from .aireadi_config import ( - redcapTransformConfig, + redcapLiveTransformConfig, + redcapReleaseTransformConfig, + deviceCollectionStatusBySiteTransformConfig, + surveyCompletionStatusTransformConfig, + surveyCompletionStatusBySiteTransformConfig, instrumentCompletionStatusBySiteTransformConfig, + phenotypeRecruitmentTransformConfig, phenotypeRecruitmentBySiteTransformConfig, + raceRecruitmentTransformConfig, raceRecruitmentBySiteTransformConfig, + sexRecruitmentTransformConfig, + sexRecruitmentBySiteTransformConfig, raceSexBySiteTransformConfig, phenotypeRaceBySexTransformConfig, + phenotypeSexByRaceTransformConfig, + racePhenotypeBySexTransformConfig, + raceSexByPhenotypeTransformConfig, + sexRaceByPhenotypeTransformConfig, + sexPhenotypeByRaceTransformConfig, phenotypeSexBySiteTransformConfig, currentMedicationsBySiteTransformConfig, ) diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 374272e9..ebadf5c3 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -119,6 +119,7 @@ "2": "Complete", "1": "Unverified", "0": "Incomplete", + "": "Value Unavailable", } phenotypes_column_map: Dict[str, str] = { @@ -127,6 +128,18 @@ "mh_a1c": "Elevated A1C", } +# sex_column_map: Dict[str, str] = { +# "M": "Male", +# "F": "Female", +# "I": "Intersex", +# "888": "Other", +# "777": "Prefer not to say", +# } + +# race_column_map: Dict[str, str] = { + +# } + treatments_column_map: Dict[str, str] = { "cmtrt_a1c": "Oral Medication", "cmtrt_glcs": "Non-Insulin Injectable", @@ -138,11 +151,163 @@ # REDCap Report Merge Map # -redcap_report_merge_map: Dict[str, Dict[str, Any]] = { - "participant-list": {"on": index_columns, "how": "inner"}, - "participant-values": {"on": index_columns, "how": "inner"}, - "instrument-status": {"on": index_columns, "how": "inner"}, - "repeat-instrument": {"on": index_columns, 
"how": "outer"}, +redcap_report_merge_map: List[Tuple[str, Dict[str, Any]]] = [ + ("participant-list", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("participant-values", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("instrument-status", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("repeat-instrument", {"on": index_columns, "how": "outer", "suffixes": (None, '_merged')}), +] + +# +# REDCap API Transform Config +# + +# Note: The REDCap report_id is matched to the transform +# by the value of the key property in the report dictionary. +redcapLiveTransformConfig: Dict[str, Any] = { + "redcap_data_dir": "storage/release/raw-storage", + "project_metadata": { + "filepath": "AI-READI/REDCap", + "filename": "Redcap_project_metadata.json", + }, + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] + { + "key": "participant-list", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_247884.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "", + }, + "transforms": [], + }, + { + "key": "participant-values", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_242544.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "", + }, + "transforms": [ + ("remap_values_by_columns", {"columns": data_columns}), + ("map_missing_values_by_columns", {"columns": data_columns}), + ( + "transform_values_by_column", + { + "column": "dricmpdat", + "new_column_name": "visitweek", + # ISO 8601 string format token for front-end: %V + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, + "missing_value": missing_value_generic, + }, + ), + ( + "transform_values_by_column", + { + "column": "dricmpdat", + "new_column_name": 
"visityear", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, + "missing_value": missing_value_generic, + }, + ), + ( + "transform_values_by_column", + { + "column": "dricmpdat", + "new_column_name": "visitdate", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), + "missing_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": phenotypes_column_map, + "new_column_name": "phenotypes", + "all_negative_value": "Control", + "default_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": treatments_column_map, + "new_column_name": "treatments", + "all_negative_value": "No Treatments", + "default_value": missing_value_generic, + }, + ), + ( + "keep_columns", + {"columns": index_columns + data_columns + computed_columns}, + ), + ], + }, + { + "key": "instrument-status", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_251954.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "", + }, + "transforms": [ + ( + "remap_values_by_columns", + {"columns": survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": survey_columns}), + ("keep_columns", {"columns": index_columns + survey_columns}), + ], + }, + { + "key": "repeat-instrument", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_259920.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + "report_id": "", + }, + "transforms": [ + ("drop_rows", {"columns": repeat_survey_columns}), + ( + "aggregate_repeat_instrument_by_index", + {"aggregator": "max", "dtype": str}, + ), + ( + 
"keep_columns", + {"columns": index_columns + repeat_survey_data_columns}, + ), + ], + }, + ], + "post_transform_merge": ( + index_columns, redcap_report_merge_map + ), + "post_merge_transforms": [ + ( + "remap_values_by_columns", + {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), + ], + "index_columns": ["record_id"], + "missing_value_generic": missing_value_generic, } # @@ -151,10 +316,17 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. -redcapTransformConfig: Dict[str, Any] = { +redcapReleaseTransformConfig: Dict[str, Any] = { + "redcap_data_dir": "storage/release/raw-storage", + "project_metadata": { + "filepath": "AI-READI/REDCap", + "filename": "Redcap_project_metadata.json", + }, "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] { "key": "participant-list", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_247884.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -166,6 +338,8 @@ }, { "key": "participant-values", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_242544.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -232,6 +406,8 @@ }, { "key": "instrument-status", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_251954.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -250,6 +426,8 @@ }, { "key": "repeat-instrument", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_259920.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -271,13 +449,7 @@ }, ], "post_transform_merge": ( - index_columns, - [ - ("participant-list", {"on": index_columns, "how": "inner"}), - ("participant-values", {"on": index_columns, "how": "inner"}), - ("instrument-status", {"on": index_columns, "how": 
"inner"}), - ("repeat-instrument", {"on": index_columns, "how": "outer"}), - ], + index_columns, redcap_report_merge_map ), "post_merge_transforms": [ ( @@ -290,6 +462,7 @@ "missing_value_generic": missing_value_generic, } + # # Visualization Transforms # @@ -1198,33 +1371,33 @@ }, ) -# Recruitment Counts by Site -raceRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Phenotype Recruitment Counts by Site +phenotypeRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "race-recruitment-by-site", + "key": "phenotype-recruitment", "strict": True, "transforms": [ { - "name": "Race Recruitment by Site", + "name": "Phenotype Recruitment", "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "race", "visitdate"], + "groups": ["phenotypes", "visitdate"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, "group": { - "name": "Race", - "field": "race", + "name": "Phenotype", + "field": "phenotypes", "missing_value": missing_value_generic, "astype": str, }, @@ -1246,7 +1419,7 @@ }, ) -# Recruitment Counts by Site +# Phenotype Recruitment Counts by Site phenotypeRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { @@ -1294,43 +1467,44 @@ }, ) -# Race & Sex Counts by Race -raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Race Recruitment Counts +raceRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "race-sex-by-site", + "key": "race-recruitment", "strict": True, "transforms": [ { - "name": "Race & Sex by Site", - "vtype": "DoubleCategorical", + "name": "Race Recruitment", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["scrsex", "race", "siteid"], + "groups": ["race", "visitdate"], "value": "record_id", "func": "count", } ], 
"accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, + "astype": str, }, "group": { - "name": "Sex", - "field": "scrsex", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { - "name": "Race", - "field": "race", + "x": { + "name": "Week of the Year", + "field": "visitdate", "missing_value": missing_value_generic, "astype": str, }, - "value": { - "name": "Count (N)", + "y": { + "name": "Cumulative Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, @@ -1341,19 +1515,19 @@ }, ) -# Phenotype & Sex Counts by Race -phenotypeSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Race Recruitment Counts by Site +raceRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "phenotype-sex-by-site", + "key": "race-recruitment-by-site", "strict": True, "transforms": [ { - "name": "Phenotype & Sex by Site", - "vtype": "DoubleCategorical", + "name": "Race Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["scrsex", "phenotypes", "siteid"], + "groups": ["siteid", "race", "visitdate"], "value": "record_id", "func": "count", } @@ -1363,21 +1537,22 @@ "name": "Site", "field": "siteid", "missing_value": missing_value_generic, + "astype": str, }, "group": { - "name": "Sex", - "field": "scrsex", + "name": "Race", + "field": "race", "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", + "x": { + "name": "Week of the Year", + "field": "visitdate", "missing_value": missing_value_generic, "astype": str, }, - "value": { - "name": "Count (N)", + "y": { + "name": "Cumulative Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, @@ -1388,19 +1563,19 @@ }, ) -# Phenotype & Site Counts by Sex -phenotypeSiteBySexTransformConfig: 
Tuple[str, Dict[str, Any]] = ( +# Sex Recruitment Counts +sexRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "phenotype-site-by-sex", + "key": "sex-recruitment", "strict": True, "transforms": [ { - "name": "Phenotype & Site by Sex", - "vtype": "DoubleCategorical", + "name": "Sex Recruitment", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["scrsex", "phenotypes", "siteid"], + "groups": ["scrsex", "visitdate"], "value": "record_id", "func": "count", } @@ -1410,21 +1585,22 @@ "name": "Sex", "field": "scrsex", "missing_value": missing_value_generic, + "astype": str, }, "group": { - "name": "Site", - "field": "siteid", + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", + "x": { + "name": "Week of the Year", + "field": "visitdate", "missing_value": missing_value_generic, "astype": str, }, - "value": { - "name": "Count (N)", + "y": { + "name": "Cumulative Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, @@ -1435,43 +1611,44 @@ }, ) -# Phenotype & Race Counts by Sex -phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Sex Recruitment Counts By Site +sexRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "phenotype-race-by-sex", + "key": "sex-recruitment-by-site", "strict": True, "transforms": [ { - "name": "Phenotype & Race by Sex", - "vtype": "DoubleCategorical", + "name": "Sex Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["phenotypes", "race", "scrsex"], + "groups": ["siteid", "scrsex", "visitdate"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Sex", - "field": "scrsex", + "name": "Site", + "field": "siteid", "missing_value": missing_value_generic, + "astype": str, }, "group": { - "name": "Phenotype", - "field": "phenotypes", 
+ "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { - "name": "Race", - "field": "race", + "x": { + "name": "Week of the Year", + "field": "visitdate", "missing_value": missing_value_generic, "astype": str, }, - "value": { - "name": "Count (N)", + "y": { + "name": "Cumulative Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, @@ -1482,18 +1659,1302 @@ }, ) -currentMedicationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( +# Race & Sex Counts by Race +raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", { - "key": "current-medications-by-site", + "key": "race-sex-by-site", "strict": True, "transforms": [ { - "name": "Current Medications by Site", + "name": "Race & Sex by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "race", "siteid"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Phenotype & Sex Counts by Race +phenotypeSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-sex-by-site", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Sex by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "phenotypes", "siteid"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", 
+ "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Phenotype & Site Counts by Sex +phenotypeSiteBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-site-by-sex", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Site by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "phenotypes", "siteid"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Phenotype & Race Counts by Sex +phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-race-by-sex", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Race by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": 
str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Phenotype & Sex Counts by Race +phenotypeSexByRaceTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-sex-by-race", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Sex by Race", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Sex & Phenotype Counts by Race +sexPhenotypeByRaceTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "sex-phenotype-by-race", + "strict": True, + "transforms": [ + { + "name": "Sex & Phenotype by Race", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Sex & Race Counts by Phenotype 
+sexRaceByPhenotypeTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "sex-race-by-phenotype", + "strict": True, + "transforms": [ + { + "name": "Sex & Race by Phenotype", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race & Sex Counts by Phenotype +raceSexByPhenotypeTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-sex-by-phenotype", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Phenotype", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race & Phenotype Counts by Sex +racePhenotypeBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-phenotype-by-sex", + "strict": True, + "transforms": [ + { + "name": "Race & Phenotype by Sex", + 
"vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + + +currentMedicationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "current-medications-by-site", + "strict": True, + "transforms": [ + { + "name": "Current Medications by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "current_medications", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Current Medication Count", + "field": "current_medications", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Participants (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + } + ], + }, +) + +# Overview +deviceCollectionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "compoundTransform", + { + "key": "device-collection-status-by-site", + "strict": True, + "transforms": [ + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } + ], + 
"accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "BCVA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Photopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", 
+ "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Mesopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "mesopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Monofilament", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "monofilament_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "ECG Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ecg_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "ECG 
Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Lab Results Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "lab_results_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Specimen Management", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "specimen_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Return", + "vtype": 
"DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_return_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Overview +instrumentCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( + "compoundTransform", + { + "key": "instrument-completion-status-by-site", + "strict": True, + "transforms": [ + { + "name": "Recruitment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "recruitment_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "FAQ Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "faq_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", 
+ "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Screening Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "screening_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Preconsent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": 
"record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Consent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Staff Consent Attestation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": 
"Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + 
"astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": 
missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "current_medications", "scrsex"], + "groups": ["siteid", "dietary_survey_complete"], "value": "record_id", "func": "count", } @@ -1506,42 +2967,32 @@ "astype": str, }, "group": { - "name": "Current Medication Count", - "field": "current_medications", + "remap": lambda x: x["name"], + "name": "Dietary Survey", + "field": "dietary_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Sex", - "field": "scrsex", + "name": "Dietary Survey", + "field": "dietary_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "value": { - "name": "Participants (N)", + "name": "Count (N)", "field": "record_id", "missing_value": missing_value_generic, "astype": int, }, }, - } - ], - }, -) - -# Overview -instrumentCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "instrument-completion-status-by-site", - "strict": True, - "transforms": [ + }, { - "name": "Recruitment Survey", + "name": "Opthalmic Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "recruitment_survey_complete"], + "groups": ["siteid", "ophthalmic_survey_complete"], "value": "record_id", "func": "count", } @@ -1555,14 +3006,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", + "name": "Opthalmic Survey", + 
"field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1575,11 +3026,11 @@ }, }, { - "name": "FAQ Survey", + "name": "PhenX SDOH Combined Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "faq_survey_complete"], + "groups": ["siteid", "px_sdoh_combined_survey_complete"], "value": "record_id", "func": "count", } @@ -1593,14 +3044,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "FAQ Survey", - "field": "faq_survey_complete", + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "FAQ Survey", - "field": "faq_survey_complete", + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1613,11 +3064,11 @@ }, }, { - "name": "Screening Survey", + "name": "PhenX Food Insecurity Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "screening_survey_complete"], + "groups": ["siteid", "px_food_insecurity_survey_complete"], "value": "record_id", "func": "count", } @@ -1631,14 +3082,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Screening Survey", - "field": "screening_survey_complete", + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Screening Survey", - "field": "screening_survey_complete", + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1651,11 +3102,14 @@ }, }, { - "name": "Preconsent Survey", + "name": "PhenX 
Neighborhood Environment Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "preconsent_survey_complete"], + "groups": [ + "siteid", + "px_neighborhood_environment_survey_complete", + ], "value": "record_id", "func": "count", } @@ -1669,14 +3123,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1689,11 +3143,14 @@ }, }, { - "name": "Consent Survey", + "name": "PhenX Racial and Ethnic Discrimination Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "consent_survey_complete"], + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], "value": "record_id", "func": "count", } @@ -1707,14 +3164,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Consent Survey", - "field": "consent_survey_complete", + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Consent Survey", - "field": "consent_survey_complete", + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1727,14 +3184,11 @@ }, }, { - "name": "Staff Consent Attestation Survey", + "name": "Decline Participation Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": [ - "siteid", - "staff_consent_attestation_survey_complete", - ], + "groups": ["siteid", 
"decline_participation_survey_complete"], "value": "record_id", "func": "count", } @@ -1748,14 +3202,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1768,11 +3222,11 @@ }, }, { - "name": "Demographics Survey", + "name": "Study Enrollment Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "demographics_survey_complete"], + "groups": ["siteid", "study_enrollment_complete"], "value": "record_id", "func": "count", } @@ -1786,14 +3240,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1806,11 +3260,11 @@ }, }, { - "name": "Health Survey", + "name": "Driving Record", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "health_survey_complete"], + "groups": ["siteid", "driving_record_complete"], "value": "record_id", "func": "count", } @@ -1824,14 +3278,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", + "name": "Driving Record", + "field": "driving_record_complete", "missing_value": missing_value_generic, "astype": 
str, }, "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", + "name": "Driving Record", + "field": "driving_record_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1844,11 +3298,11 @@ }, }, { - "name": "Substance Use Survey", + "name": "Device Distribution", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "substance_use_survey_complete"], + "groups": ["siteid", "device_distribution_complete"], "value": "record_id", "func": "count", } @@ -1862,14 +3316,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", + "name": "Device Distribution", + "field": "device_distribution_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", + "name": "Device Distribution", + "field": "device_distribution_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1882,11 +3336,11 @@ }, }, { - "name": "CES-D-10 Survey", + "name": "Medications Assessment", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "cesd10_survey_complete"], + "groups": ["siteid", "meds_assessment_complete"], "value": "record_id", "func": "count", } @@ -1900,14 +3354,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", + "name": "Medications Assessment", + "field": "meds_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", + "name": "Medications Assessment", + "field": "meds_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1920,11 +3374,11 @@ }, }, { - "name": "PAID-5 DM Survey", + "name": "Physical Assessment", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "paid5_dm_survey_complete"], + "groups": ["siteid", 
"physical_assessment_complete"], "value": "record_id", "func": "count", } @@ -1938,14 +3392,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", + "name": "Physical Assessment", + "field": "physical_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", + "name": "Physical Assessment", + "field": "physical_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1958,11 +3412,11 @@ }, }, { - "name": "Diabetes Survey", + "name": "BCVA", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "diabetes_survey_complete"], + "groups": ["siteid", "bcva_complete"], "value": "record_id", "func": "count", } @@ -1976,14 +3430,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", + "name": "BCVA", + "field": "bcva_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", + "name": "BCVA", + "field": "bcva_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -1996,11 +3450,11 @@ }, }, { - "name": "Dietary Survey", + "name": "Photopic MARS", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "dietary_survey_complete"], + "groups": ["siteid", "photopic_mars_complete"], "value": "record_id", "func": "count", } @@ -2014,14 +3468,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", + "name": "Photopic MARS", + "field": "photopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", + "name": "Photopic MARS", + "field": "photopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2034,11 +3488,11 
@@ }, }, { - "name": "Opthalmic Survey", + "name": "Mesopic MARS", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "ophthalmic_survey_complete"], + "groups": ["siteid", "mesopic_mars_complete"], "value": "record_id", "func": "count", } @@ -2052,14 +3506,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2072,11 +3526,11 @@ }, }, { - "name": "PhenX SDOH Combined Survey", + "name": "Monofilament", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "groups": ["siteid", "monofilament_complete"], "value": "record_id", "func": "count", } @@ -2090,14 +3544,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", + "name": "Monofilament", + "field": "monofilament_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", + "name": "Monofilament", + "field": "monofilament_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2110,11 +3564,11 @@ }, }, { - "name": "PhenX Food Insecurity Survey", + "name": "MOCA", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "px_food_insecurity_survey_complete"], + "groups": ["siteid", "moca_complete"], "value": "record_id", "func": "count", } @@ -2128,14 +3582,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", + "name": "MOCA", + "field": "moca_complete", 
"missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", + "name": "MOCA", + "field": "moca_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2148,14 +3602,11 @@ }, }, { - "name": "PhenX Neighborhood Environment Survey", + "name": "ECG Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": [ - "siteid", - "px_neighborhood_environment_survey_complete", - ], + "groups": ["siteid", "ecg_complete"], "value": "record_id", "func": "count", } @@ -2169,14 +3620,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", + "name": "ECG Survey", + "field": "ecg_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", + "name": "ECG Survey", + "field": "ecg_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2189,14 +3640,11 @@ }, }, { - "name": "PhenX Racial and Ethnic Discrimination Survey", + "name": "Lab Results Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": [ - "siteid", - "px_racial_ethnic_discrimination_survey_complete", - ], + "groups": ["siteid", "lab_results_complete"], "value": "record_id", "func": "count", } @@ -2210,14 +3658,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", + "name": "Lab Results Survey", + "field": "lab_results_complete", "missing_value": missing_value_generic, "astype": str, }, - "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", + "subgroup": { + "name": "Lab Results Survey", + "field": "lab_results_complete", "missing_value": 
missing_value_generic, "astype": str, }, @@ -2230,11 +3678,11 @@ }, }, { - "name": "Decline Participation Survey", + "name": "Specimen Management", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "decline_participation_survey_complete"], + "groups": ["siteid", "specimen_management_complete"], "value": "record_id", "func": "count", } @@ -2248,14 +3696,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Decline Participation Survey", - "field": "decline_participation_survey_complete", + "name": "Specimen Management", + "field": "specimen_management_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Decline Participation Survey", - "field": "decline_participation_survey_complete", + "name": "Specimen Management", + "field": "specimen_management_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2268,11 +3716,11 @@ }, }, { - "name": "Study Enrollment Survey", + "name": "Device Return", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "study_enrollment_complete"], + "groups": ["siteid", "device_return_complete"], "value": "record_id", "func": "count", } @@ -2286,14 +3734,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", + "name": "Device Return", + "field": "device_return_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", + "name": "Device Return", + "field": "device_return_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2306,11 +3754,11 @@ }, }, { - "name": "Driving Record", + "name": "Disposition Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "driving_record_complete"], + "groups": ["siteid", "disposition_complete"], "value": "record_id", "func": "count", } @@ -2324,14 +3772,14 @@ }, "group": { "remap": lambda x: 
x["name"], - "name": "Driving Record", - "field": "driving_record_complete", + "name": "Disposition Survey", + "field": "disposition_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Driving Record", - "field": "driving_record_complete", + "name": "Disposition Survey", + "field": "disposition_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2344,11 +3792,11 @@ }, }, { - "name": "Device Distribution", + "name": "Data Management Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "device_distribution_complete"], + "groups": ["siteid", "data_management_complete"], "value": "record_id", "func": "count", } @@ -2362,14 +3810,14 @@ }, "group": { "remap": lambda x: x["name"], - "name": "Device Distribution", - "field": "device_distribution_complete", + "name": "Data Management Survey", + "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Device Distribution", - "field": "device_distribution_complete", + "name": "Data Management Survey", + "field": "data_management_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2381,33 +3829,44 @@ }, }, }, + ], + }, +) + +# Overview +surveyCompletionStatusTransformConfig: Tuple[str, Dict[str, Any]] = ( + "compoundTransform", + { + "key": "instrument-completion-status", + "strict": True, + "transforms": [ { - "name": "Medications Assessment", + "name": "Demographics Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "meds_assessment_complete"], + "groups": ["demographics_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Demographics Survey", + "field": "demographics_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": 
"meds_assessment_complete", + "name": "Demographics Survey", + "field": "demographics_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", + "name": "Demographics Survey", + "field": "demographics_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2420,32 +3879,32 @@ }, }, { - "name": "Physical Assessment", + "name": "Health Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "physical_assessment_complete"], + "groups": ["health_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Health Survey", + "field": "health_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Physical Assessment", - "field": "physical_assessment_complete", + "name": "Health Survey", + "field": "health_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Physical Assessment", - "field": "physical_assessment_complete", + "name": "Health Survey", + "field": "health_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2458,32 +3917,32 @@ }, }, { - "name": "BCVA", + "name": "Substance Use Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "bcva_complete"], + "groups": ["substance_use_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "BCVA", - "field": "bcva_complete", + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", "missing_value": missing_value_generic, "astype": str, }, 
"subgroup": { - "name": "BCVA", - "field": "bcva_complete", + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2496,32 +3955,32 @@ }, }, { - "name": "Photopic MARS", + "name": "CES-D-10 Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "photopic_mars_complete"], + "groups": ["cesd10_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Photopic MARS", - "field": "photopic_mars_complete", + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Photopic MARS", - "field": "photopic_mars_complete", + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2534,32 +3993,32 @@ }, }, { - "name": "Mesopic MARS", + "name": "PAID-5 DM Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "mesopic_mars_complete"], + "groups": ["paid5_dm_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2572,32 
+4031,32 @@ }, }, { - "name": "Monofilament", + "name": "Diabetes Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "monofilament_complete"], + "groups": ["diabetes_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Monofilament", - "field": "monofilament_complete", + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Monofilament", - "field": "monofilament_complete", + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2610,32 +4069,32 @@ }, }, { - "name": "MOCA", + "name": "Dietary Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "moca_complete"], + "groups": ["dietary_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Dietary Survey", + "field": "dietary_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "MOCA", - "field": "moca_complete", + "name": "Dietary Survey", + "field": "dietary_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "MOCA", - "field": "moca_complete", + "name": "Dietary Survey", + "field": "dietary_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2648,32 +4107,32 @@ }, }, { - "name": "ECG Survey", + "name": "Opthalmic Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "ecg_complete"], + "groups": ["ophthalmic_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { 
"filterby": { - "name": "Site", - "field": "siteid", + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "ECG Survey", - "field": "ecg_complete", + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "ECG Survey", - "field": "ecg_complete", + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2686,32 +4145,32 @@ }, }, { - "name": "Lab Results Survey", + "name": "PhenX SDOH Combined Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "lab_results_complete"], + "groups": ["px_sdoh_combined_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Lab Results Survey", - "field": "lab_results_complete", + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Lab Results Survey", - "field": "lab_results_complete", + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2724,32 +4183,32 @@ }, }, { - "name": "Specimen Management", + "name": "PhenX Food Insecurity Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "specimen_management_complete"], + "groups": ["px_food_insecurity_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "PhenX Food Insecurity Survey", + 
"field": "px_food_insecurity_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Specimen Management", - "field": "specimen_management_complete", + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Specimen Management", - "field": "specimen_management_complete", + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2762,32 +4221,32 @@ }, }, { - "name": "Device Return", + "name": "PhenX Neighborhood Environment Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "device_return_complete"], + "groups": ["px_neighborhood_environment_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Device Return", - "field": "device_return_complete", + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Device Return", - "field": "device_return_complete", + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2800,32 +4259,32 @@ }, }, { - "name": "Disposition Survey", + "name": "PhenX Racial and Ethnic Discrimination Survey", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "disposition_complete"], + "groups": ["px_racial_ethnic_discrimination_survey_complete",], "value": "record_id", "func": "count", } ], 
"accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Count (N)", + "field": "record_id", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Disposition Survey", - "field": "disposition_complete", + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Disposition Survey", - "field": "disposition_complete", + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2838,32 +4297,32 @@ }, }, { - "name": "Data Management Survey", + "name": "Medications Assessment", "vtype": "DoubleCategorical", "methods": [ { - "groups": ["siteid", "data_management_complete"], + "groups": ["meds_assessment_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Site", - "field": "siteid", + "name": "Medications Assessment", + "field": "meds_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, "group": { "remap": lambda x: x["name"], - "name": "Data Management Survey", - "field": "data_management_complete", + "name": "Medications Assessment", + "field": "meds_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, "subgroup": { - "name": "Data Management Survey", - "field": "data_management_complete", + "name": "Medications Assessment", + "field": "meds_assessment_complete", "missing_value": missing_value_generic, "astype": str, }, @@ -2878,16 +4337,26 @@ ], }, ) - moduleTransformConfigs: Dict[str, Any] = { + "device-collection-status-by-site": deviceCollectionStatusBySiteTransformConfig, "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, + "survey-completion-status": surveyCompletionStatusTransformConfig, 
"survey-completion-status-by-site": surveyCompletionStatusBySiteTransformConfig, "recruitment-operations-status-by-site": recruitmentOperationsBySiteTransformConfig, "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, + "phenotype-sex-by-race": phenotypeSexByRaceTransformConfig, + "race-phenotype-by-sex": racePhenotypeBySexTransformConfig, + "race-sex-by-phenotype": raceSexByPhenotypeTransformConfig, + "sex-phenotype-by-race": sexPhenotypeByRaceTransformConfig, + "sex-race-by-phenotype": sexRaceByPhenotypeTransformConfig, + "phenotype-recruitment": phenotypeRecruitmentTransformConfig, "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, + "race-recruitment": raceRecruitmentTransformConfig, "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, + "sex-recruitment": sexRecruitmentTransformConfig, + "sex-recruitment-by-site": sexRecruitmentBySiteTransformConfig, "race-sex-by-site": raceSexBySiteTransformConfig, "current-medications-by-site": currentMedicationsBySiteTransformConfig, } diff --git a/modules/etl/transforms/__init__.py b/modules/etl/transforms/__init__.py index 7f804252..7fff8571 100644 --- a/modules/etl/transforms/__init__.py +++ b/modules/etl/transforms/__init__.py @@ -1,2 +1,3 @@ -from .redcap_transform import RedcapTransform +from .redcap_live_transform import RedcapLiveTransform +from .redcap_release_transform import RedcapReleaseTransform from .module_transform import ModuleTransform diff --git a/modules/etl/transforms/redcap_transform.py b/modules/etl/transforms/redcap_live_transform.py similarity index 96% rename from modules/etl/transforms/redcap_transform.py rename to modules/etl/transforms/redcap_live_transform.py index b953de55..5e02a898 100644 --- a/modules/etl/transforms/redcap_transform.py +++ b/modules/etl/transforms/redcap_live_transform.py @@ -8,8 +8,9 @@ import numpy as np 
-class RedcapTransform(object): +class RedcapLiveTransform(object): def __init__(self, config: dict) -> None: + # # Config # @@ -17,6 +18,8 @@ def __init__(self, config: dict) -> None: # REDCap API Config self.redcap_api_url = config["redcap_api_url"] self.redcap_api_key = config["redcap_api_key"] + self.redcap_data_dir = config["redcap_data_dir"] + self.redcap_metadata_config = config["project_metadata"] # Set Transform Key self.key = config["key"] if "key" in config else "redcap-transform" @@ -118,8 +121,9 @@ def __init__(self, config: dict) -> None: # Initialize PyCap Objects self.logger.info(f"Retrieving REDCap project data") self.project = Project(self.redcap_api_url, self.redcap_api_key) + + # Load REDCap Project Metadata self.metadata = self.project.export_metadata() - self.repeat_events_data = self.project.export_repeating_instruments_events() # # Setup Reports & Apply Transforms @@ -143,17 +147,17 @@ def __init__(self, config: dict) -> None: report_kwdargs = report_config["kwdargs"] | self._reports_kwdargs report_transforms = report_config["transforms"] report = self.project.export_report(**report_kwdargs) + # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], - "report": report, - "df": pd.DataFrame(report), + "df": pd.DataFrame(report, dtype = str), "transforms": report_transforms, "transformed": None, "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), } - # Generate Transformed Report + # Apply Pre-Merge Report Transforms self.logger.info(f"Applying REDCap report transforms") for report_key, report_object in self.reports.items(): self._apply_report_transforms(report_key) @@ -184,14 +188,6 @@ def get_report_id(self, report_key: str) -> str: """ return self.reports[report_key]["id"] - def get_report_pycap( - self, report_key: str - ) -> Union[List[Dict[str, Any]], str, pd.DataFrame]: - """ - Returns a PyCap Report object containing the report. 
- """ - return self.reports[report_key]["report"] - def get_report_df(self, report_key: str) -> pd.DataFrame: """ Returns a pd.DataFrame instance containing the report. @@ -221,6 +217,37 @@ def get_report_annotations(self, report_key: str) -> List[Dict[str, Any]]: """ return self.reports[report_key]["annotations"] + # + # Report Merging + # + + def _merge_reports( + self, + index_columns: List[str], + merge_steps: List[Tuple[str, Dict[str, Any]]], + ) -> pd.DataFrame: + """ + Performs N - 1 merge transforms on N reports. + """ + + receiving_report_key, _ = merge_steps[0] + df_receiving_report = self.reports[receiving_report_key]["transformed"][ + index_columns + ] + + if len(merge_steps) > 0: + for providing_report_key, merge_kwdargs in merge_steps: + df_providing_report = self.reports[providing_report_key]["transformed"] + df_receiving_report = df_receiving_report.merge( + df_providing_report, **merge_kwdargs + ) + else: + self.logger.warn( + f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." 
+ ) + + return df_receiving_report + # # Transform Applicator # @@ -265,7 +292,7 @@ def _drop_columns( columns: List[str] = [], annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) df = df.drop(columns=columns) return df @@ -287,7 +314,7 @@ def _keep_columns( ) -> pd.DataFrame: columns = list( set(df.columns) - - set(self._resolve_columns_with_dataframe(df=df, columns=columns)) + - set(self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns)) ) df = df.drop(columns=columns) return df @@ -310,7 +337,7 @@ def _append_column_suffix( separator: str = "", annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) df[columns] = df[columns].rename( mapper=lambda name: f"{name}{separator}{suffix}" ) @@ -347,7 +374,7 @@ def _prepend_column_prefix( separator: str = "", annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) df[columns] = df[columns].rename( mapper=lambda name: f"{prefix}{separator}{name}" ) @@ -384,7 +411,7 @@ def _remap_values_by_columns( annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: # Resolve Mappable Fields and Available Value Maps - columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) mappable_fields: List[Dict[str, Any]] if len(value_map) > 0: @@ -498,7 +525,7 @@ def _map_missing_values_by_columns( missing_value: Any = None, annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - columns = 
self._resolve_columns_with_dataframe(df=df, columns=columns) + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) missing_value = ( missing_value if missing_value is not None else self.missing_value_generic ) @@ -537,7 +564,7 @@ def _drop_rows( condition: Callable = lambda column: column == "", annotation: List[Dict[str, Any]] = [], ) -> pd.DataFrame: - columns = self._resolve_columns_with_dataframe(df=df, columns=columns) + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] return df @@ -691,44 +718,13 @@ def new_column_from_binary_columns_negative_class( dtype=dtype, ) - # - # Report Merging - # - - def _merge_reports( - self, - index_columns: List[str], - merge_steps: List[Tuple[str, Dict[str, Any]]], - ) -> pd.DataFrame: - """ - Performs N - 1 merge transforms on N reports. - """ - - receiving_report_key, _ = merge_steps[0] - df_receiving_report = self.reports[receiving_report_key]["transformed"][ - index_columns - ] - - if len(merge_steps) > 0: - for providing_report_key, merge_kwdargs in merge_steps: - df_providing_report = self.reports[providing_report_key]["transformed"] - df_receiving_report = df_receiving_report.merge( - df_providing_report, **merge_kwdargs - ) - else: - self.logger.warn( - f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." - ) - - return df_receiving_report - # # Utilities # # Transform Prelude - Get Applicable Transform Columns def _resolve_columns_with_dataframe( - self, df: pd.DataFrame, columns: List[str] + self, df: pd.DataFrame, columns: List[str], default_columns: List[str] ) -> List[str]: """ Internal utility function. Uses set logic to ensure @@ -740,21 +736,21 @@ def _resolve_columns_with_dataframe( if len(requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – columns parameter has no values. 
Defaulting to df.columns" + f"Unexpected Transform – columns parameter has no values. Defaulting to all df.columns" ) - resolved_columns = [*available_columns] + resolved_columns = default_columns elif len(available_columns & requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – none of the values in the columns parameter were found in df.columns. Defaulting to df.columns" + f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to all df.columns" ) - resolved_columns = [*available_columns] + resolved_columns = default_columns elif len(requested_columns - available_columns) > 0: self.logger.warn( f"Unexpected Transform – df.columns missing values present in columns parameter: {', '.join([*requested_columns - available_columns])}. Continuing with union." ) resolved_columns = [*(available_columns & requested_columns)] else: - resolved_columns = [*requested_columns] + resolved_columns = columns return resolved_columns diff --git a/modules/etl/transforms/redcap_release_transform.py b/modules/etl/transforms/redcap_release_transform.py new file mode 100644 index 00000000..ca18eec6 --- /dev/null +++ b/modules/etl/transforms/redcap_release_transform.py @@ -0,0 +1,935 @@ +# Library Modules +from typing import Any, Callable, Union, List, Dict, Tuple +import re, os, csv, json, logging, datetime + +# Third Party Modules +from azure.storage.blob import BlobServiceClient +from redcap import Project +import pandas as pd +import numpy as np + + +class RedcapReleaseTransform(object): + def __init__(self, config: dict) -> None: + + # + # Config + # + + # Get CWD + self.cwd = os.getcwd() + + # REDCap API Config + self.redcap_api_url = config["redcap_api_url"] + self.redcap_api_key = config["redcap_api_key"] + self.redcap_data_dir = config["redcap_data_dir"] + self.redcap_metadata_config = config["project_metadata"] + + # Set Transform Key + self.key = config["key"] if "key" in config else "redcap-transform" + + # Data Config + 
self.index_columns = ( + config["index_columns"] if "index_columns" in config else ["record_id"] + ) + + # REDCap Reports Config + self.reports_configs = config["reports"] if "reports" in config else [] + + # Report Merging + self.post_transform_merge = ( + config["post_transform_merge"] + if "post_transform_merge" in config + else ([], []) + ) + + # Post Merge Transforms + self.post_merge_transforms = ( + config["post_merge_transforms"] if "post_merge_transforms" in config else [] + ) + + # Column Value Separator + self.multivalue_separator = ( + config["multivalue_separator"] if "multivalue_separator" in config else "|" + ) + + # CSV Float Format (Default: "%.2f") + self.csv_float_format = ( + config["csv_float_format"] if "csv_float_format" in config else "%.2f" + ) + + self.missing_value_generic = ( + config["missing_value_generic"] + if "missing_value_generic" in config + else "Value Unavailable" + ) + + # Logging Config + self.logging_config = ( + config["logging_config"] + if "logging_config" in config + else { + "encoding": "utf-8", + "filename": "REDCapETL.log", + "level": logging.INFO, + } + ) + + # Configure Logging + logging.basicConfig(**self.logging_config) + self.logger = logging.getLogger("RedcapTransform") + + # + # REDCap Parsing Variables + # + + # Regex Complex Field Parsers + self._field_rgx = { + "radio": re.compile(r"^[0-9\.]{1,17}"), + "checkbox": re.compile(r"^[0-9\.]{1,17}"), + "dropdown": re.compile(r"^[0-9\.]{1,17}"), + "yesno": re.compile(r"^[0-9\.]{1,17}"), + "text": re.compile(r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}"), + "descriptive": re.compile(r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}"), + "notes": re.compile(r"^[a-zA-Z0-9\-\_\(\)\[\]\&\+\?\!\$\*]{1,128}"), + "file": re.compile(r".*"), + "signature": re.compile(r".*"), + "calc": re.compile(r".*"), + } + + # General Parsing Variables + self.none_values = [ + np.nan, + pd.NaT, + None, + "nan", + "NaN", + "-", + "", + self.missing_value_generic, + ] + self.none_map = {key: 
self.missing_value_generic for key in self.none_values} + self.survey_instrument_map = { + "2": "Complete", + "1": "Unverified", + "0": "Incomplete", + "": self.missing_value_generic, + } + # self.boolean_map = { + # "Yes": 1.0, + # "1.0": 1.0, + # "true": 1.0, + # "True": 1.0, + # 1: 1.0, + # "No": 0.0, + # "0.0": 0.0 + # "false": 0.0, + # "False": 0.0, + # 0: 0.0, + # "": self.missing_value_generic, + # "NaN": self.missing_value_generic, + # } + + self.logger.info(f"Initialized") + + # + # PyCap Initialization + # + + # Initialize PyCap Objects + self.logger.info(f"Retrieving REDCap project data") + self.project = Project(self.redcap_api_url, self.redcap_api_key) + + # Load Release REDCap Project Metadata + self.metadata = self.get_stored_project_metadata( + os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") or "", + os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER") or "", + f"{self.redcap_metadata_config['filepath']}/{self.redcap_metadata_config['filename']}" + ) + + # + # Setup Reports & Apply Transforms + # + + # Internal Defaults + # - Key Assumptions for Transform Functions + # – Only Update if REDCap API and/or PyCap Update + self._reports_kwdargs = { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "\t", + } + # Get & Structure Report + self.logger.info(f"Retrieving REDCap reports") + self.reports = {} + for report_config in self.reports_configs: + # Get Report + report_key = report_config["key"] + report_kwdargs = report_config["kwdargs"] | self._reports_kwdargs + report_transforms = report_config["transforms"] + + # Load Release REDCap Reports + report = self.get_stored_report( + os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") or "", + os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER") or "", + f"{report_config['filepath']}/{report_config['filename']}" + ) + + # Structure Reports + self.reports[report_key] = { + "id": 
report_kwdargs["report_id"], + "df": pd.read_csv(f"{self.cwd}/{self.redcap_data_dir}/{report_config['filepath']}/{report_config['filename']}", delimiter = self._reports_kwdargs["csv_delimiter"], dtype = str), + "transforms": report_transforms, + "transformed": None, + "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), + } + + # Apply Pre-Merge Report Transforms + self.logger.info(f"Applying REDCap report transforms") + for report_key, report_object in self.reports.items(): + self._apply_report_transforms(report_key) + + # Merge Reports + self.logger.info(f"Merging REDCap reports") + index_columns, merge_steps = self.post_transform_merge + self.merged = self._merge_reports(index_columns, merge_steps) + + # Apply Post-Merge Transforms + self.logger.info(f"Applying REDCap report post-merge transforms") + for transform, transform_kwdargs in self.post_merge_transforms: + self.merged = self.apply_transform( + self.merged, transform, transform_kwdargs + ) + + self.logger.info(f"REDCap transforms complete") + + return + + # + # Getters + # + + def get_stored_project_metadata(self, connection_string: str, container_name: str, blob_path: str) -> dict: + + # Connect to Azure Blog Storage + blob_service_client = BlobServiceClient.from_connection_string(connection_string) + container_client = blob_service_client.get_container_client(container_name) + blob_client = container_client.get_blob_client(blob_path) + + # Get Blob + download_stream = blob_client.download_blob() + + return json.loads(download_stream.readall()) + + def get_stored_report(self, connection_string: str, container_name: str, blob_path: str) -> pd.DataFrame: + + # Connect to Azure Blog Storage + blob_service_client = BlobServiceClient.from_connection_string(connection_string) + container_client = blob_service_client.get_container_client(container_name) + blob_client = container_client.get_blob_client(blob_path) + + # Get Blob + df = pd.read_csv(blob_client.download_blob()) + return df + + def 
get_report_id(self, report_key: str) -> str: + """ + Returns a str instance of the REDCap report ID. + """ + return self.reports[report_key]["id"] + + def get_report_df(self, report_key: str) -> pd.DataFrame: + """ + Returns a pd.DataFrame instance containing the report. + """ + return self.reports[report_key]["df"] + + def get_report_transformed_df(self, report_key: str) -> pd.DataFrame: + """ + Returns a pd.DataFrame instance containing the report + with normalization transforms applied. + """ + return self.reports[report_key]["transformed"] + + def get_report_transforms( + self, report_key: str + ) -> List[Tuple[str, Dict[str, Any]]]: + """ + Returns a list of transforms that will be applied to + the report + """ + return self.reports[report_key]["transforms"] + + def get_report_annotations(self, report_key: str) -> List[Dict[str, Any]]: + """ + Returns a list of annotations generated from the + REDCap metadata API call. + """ + return self.reports[report_key]["annotations"] + + # + # Report Merging + # + + def _merge_reports( + self, + index_columns: List[str], + merge_steps: List[Tuple[str, Dict[str, Any]]], + ) -> pd.DataFrame: + """ + Performs N - 1 merge transforms on N reports. + """ + + receiving_report_key, _ = merge_steps[0] + df_receiving_report = self.reports[receiving_report_key]["transformed"][ + index_columns + ] + + if len(merge_steps) > 0: + for providing_report_key, merge_kwdargs in merge_steps: + df_providing_report = self.reports[providing_report_key]["transformed"] + df_receiving_report = df_receiving_report.merge( + df_providing_report, **merge_kwdargs + ) + else: + self.logger.warn( + f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." 
+ ) + + return df_receiving_report + + # + # Transform Applicator + # + + # Applies Declared Transforms to Reports + def _apply_report_transforms(self, report_key: str) -> None: + """ + Interal method that applies the transforms to each + report as an idempotent transform stack. + """ + report = self.reports[report_key] + annotation = report["annotation"] + report["transformed"] = report["df"] + for transform in report["transforms"]: + transform_name, transform_kwdargs = transform + transform_kwdargs = transform_kwdargs | {"annotation": annotation} + report["transformed"] = self.apply_transform( + report["transformed"], transform_name, transform_kwdargs + ) + + return + + def apply_transform( + self, + df: pd.DataFrame, + transform_name: str, + transform_kwdargs: Dict[str, Any] = {}, + ) -> pd.DataFrame: + return getattr(self, f"_{transform_name}")(df, **transform_kwdargs) + + # + # Transforms - Columns + # + + # + # Drop Columns + # + + def _drop_columns( + self, + df: pd.DataFrame, + columns: List[str] = [], + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) + df = df.drop(columns=columns) + return df + + def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: + """ + Drop columns from pd.DataFrame. + """ + return self._drop_columns(df=df, columns=columns) + + # + # Keep Columns + # + + def _keep_columns( + self, + df: pd.DataFrame, + columns: List[str] = [], + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = list( + set(df.columns) + - set(self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns)) + ) + df = df.drop(columns=columns) + return df + + def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: + """ + Keep only selected columns in pd.DataFrame. 
+ """ + return self._keep_columns(df=df, columns=columns) + + # + # Transform - Append Column Prefix + # + + def _append_column_suffix( + self, + df: pd.DataFrame, + columns: List[str] = [], + suffix: str = "", + separator: str = "", + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) + df[columns] = df[columns].rename( + mapper=lambda name: f"{name}{separator}{suffix}" + ) + return df + + def append_column_suffix( + self, + df: pd.DataFrame, + columns: List[str] = [], + suffix: str = "", + separator: str = "", + ) -> pd.DataFrame: + """ + Append a suffix to columns of pd.DataFrame. Note: If no + columns parameter is provided, the suffix is applied every + column. If no suffix is provided, the column names remain + unchanged. A separator argument allows for the expansion + of column names by one or more characters, e.g. "_" for + snakecase. + """ + return self._append_column_suffix( + df=df, columns=columns, suffix=suffix, separator=separator + ) + + # + # Transform - Prepend Column Prefix + # + + def _prepend_column_prefix( + self, + df: pd.DataFrame, + columns: List[str] = [], + prefix: str = "", + separator: str = "", + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) + df[columns] = df[columns].rename( + mapper=lambda name: f"{prefix}{separator}{name}" + ) + return df + + def prepend_column_prefix( + self, + df: pd.DataFrame, + columns: List[str] = [], + prefix: str = "", + separator: str = "", + ) -> pd.DataFrame: + """ + Append a prefix to columns of pd.DataFrame. Note: If no + columns parameter is provided, the prefix is applied every + column. If no prefix is provided, the column names remain + unchanged. A separator argument allows for the expansion + of column names by one or more characters, e.g. "_" for + snakecase. 
+ """ + return self._prepend_column_prefix( + df=df, columns=columns, prefix=prefix, separator=separator + ) + + # + # Transforms - Remap Values by Columns + # + + def _remap_values_by_columns( + self, + df: pd.DataFrame, + columns: List[str], + value_map: Dict[str, Any] = {}, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + # Resolve Mappable Fields and Available Value Maps + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) + + mappable_fields: List[Dict[str, Any]] + if len(value_map) > 0: + mappable_fields = [ + {"name": column, "options": value_map} for column in columns + ] + else: + mappable_fields = [ + field + for field in annotation + if len(field["options"]) > 0 and field["name"] in columns + ] + + for mappable_field in mappable_fields: + column, value_map = mappable_field["name"], mappable_field["options"] + for i, value in enumerate(df[column]): + subvalues = [ + subvalue.strip() + for subvalue in str(value).split(",") + if len(subvalue) > 0 + ] + df.loc[i, column] = self.multivalue_separator.join( + [ + value_map[subvalue] + for subvalue in subvalues + if subvalue in value_map.keys() + ] + ) + + return df + + def remap_values_by_columns( + self, + df: pd.DataFrame, + columns: List[str], + value_map: Dict[str, Any] = {}, + ) -> pd.DataFrame: + """ + Remap values by column using a list of annotations. + Each annotation is a dictionary containing a the + following keys: "name", "type", and "options". Key + to this method are then "name" and "options" entries. + The value of the "name" corresponds to the + pd.DataFrame column name. The value of the"options" + entry is a value_map object generated from the + REDCapo metadata API request: + + annotation = { + "name": field["field_name"], + "type": field["field_type"], + "options": field["field_options"] + } + + If multiple values are found in the field, they will + be mapped with a separator. The default separator is + a pipe (i.e. "|"). 
+ + Returns a transformed pd.DataFrame + """ + return self._remap_values_by_columns( + df=df, columns=columns, value_map=value_map + ) + + # + # Transform - Values By Column + # + + def _transform_values_by_column( + self, + df: pd.DataFrame, + column: str, + new_column_name: str, + transform: Callable, + missing_value: Any, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + df[new_column_name] = df.loc[df[column] != missing_value, column].apply( + transform + ) + df[new_column_name] = df[new_column_name].fillna(missing_value) + return df + + def transform_values_by_column( + self, + df: pd.DataFrame, + column: str, + new_column_name: str, + transform: Callable, + missing_value: Any, + ) -> pd.DataFrame: + """ + Replace 0-length values or values with keys in + self.none_map with self.missing_value_generic. + """ + return self._transform_values_by_column( + df=df, + column=column, + new_column_name=new_column_name, + transform=transform, + missing_value=missing_value, + ) + + # + # Transform - Map Missing Values By Columns + # + + def _map_missing_values_by_columns( + self, + df: pd.DataFrame, + columns: List[str], + missing_value: Any = None, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) + missing_value = ( + missing_value if missing_value is not None else self.missing_value_generic + ) + for column in columns: + for i, value in enumerate(df[column]): + if (len(str(value)) == 0) or (value in self.none_map.keys()): + df.loc[i, column] = missing_value + else: + continue + + return df + + def map_missing_values_by_columns( + self, df: pd.DataFrame, columns: List[str], missing_value: Any + ) -> pd.DataFrame: + """ + Replace 0-length values or values with keys in + self.none_map with self.missing_value_generic. 
+ """ + return self._map_missing_values_by_columns( + df=df, columns=columns, missing_value=missing_value + ) + + # + # Transforms - Rows + # + + # + # Drop Rows + # + + def _drop_rows( + self, + df: pd.DataFrame, + columns: List[str] = [], + condition: Callable = lambda column: column == "", + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) + df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] + return df + + def drop_rows( + self, + df: pd.DataFrame, + columns: List[str], + condition: Callable = lambda column: column == "", + ) -> pd.DataFrame: + """ + Drop rows from pd.DataFrame. + """ + return self._drop_rows(df=df, columns=columns) + + # + # Transforms - Aggregation + # + + # ... + + # + # Transforms - Aggregate Repeat Instruments by Index + # + + def _aggregate_repeat_instrument_by_index( + self, + df: pd.DataFrame, + aggregator: str = "max", + dtype: Callable = float, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + new_columns = [column for column in df["redcap_repeat_instrument"].unique() if column is not np.nan] + pivot = pd.pivot_table( + df, + index=self.index_columns, + columns=["redcap_repeat_instrument"], + values="redcap_repeat_instance", + aggfunc=aggregator, + fill_value=self.missing_value_generic, + ) + df = df.merge(pivot, how="outer", on=self.index_columns) + df = df.drop_duplicates(self.index_columns, keep="first") + for column in new_columns: + df[column] = df[column].astype(dtype) + return df + + def aggregate_repeat_instrument_by_index( + self, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float + ) -> pd.DataFrame: + """ + Pre-processing REDCap repeat_instrument so each instrument + has its own column and the value. The value is computed + using an aggregation function applied to the repeat_instance + field. 
+ """ + return self._aggregate_repeat_instrument_by_index( + df=df, aggregator=aggregator, dtype=dtype + ) + + # + # Generate New Columns + # + + def _new_column_from_binary_columns_positive_class( + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + all_negative_value: str = "", + default_value: str | None = "Value Unavailable", + dtype: Callable = float, + annotation: List[Dict[str, Any]] = [], + ) -> pd.DataFrame: + new_column_name = ( + new_column_name + if len(new_column_name) > 0 + else "_".join(column_name_map.keys()) + ) + df[new_column_name] = "" + for column_name, column_value in column_name_map.items(): + df.loc[ + df[column_name] == "Yes", new_column_name + ] += f"{column_value}{self.multivalue_separator}" + for column_name, column_value in column_name_map.items(): + df.loc[ + (df[column_name] == default_value) & (df[new_column_name] == ""), + new_column_name, + ] = default_value + df.loc[df[new_column_name] == "", new_column_name] = all_negative_value + # Remove delimiter character if column ends with it + rgx = f"\\{self.multivalue_separator}$" + df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex=True) + + return df + + + def new_column_from_binary_columns_positive_class( + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + all_negative_value: str = "", + default_value: str | None = "Value Unavailable", + dtype: Callable = float, + ) -> pd.DataFrame: + """ + Pre-processing REDCap repeat_instrument so each instrument + has its own column and the value. The value is computed + using an aggregation function applied to the repeat_instance + field. 
+ """ + return self._new_column_from_binary_columns_positive_class( + df=df, + column_name_map=column_name_map, + new_column_name=new_column_name, + default_value=default_value, + dtype=dtype, + ) + + def _new_column_from_binary_columns_negative_class( + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + dtype: Callable = float, + ) -> pd.DataFrame: + new_column_name = ( + new_column_name + if len(new_column_name) > 0 + else "_".join(column_name_map.keys()) + ) + df[new_column_name] = df[list(column_name_map.keys())].idxmin(axis=1) + return df + + def new_column_from_binary_columns_negative_class( + self, + df: pd.DataFrame, + column_name_map: dict, + new_column_name: str = "", + dtype: Callable = float, + ) -> pd.DataFrame: + """ + Pre-processing REDCap repeat_instrument so each instrument + has its own column and the value. The value is computed + using an aggregation function applied to the repeat_instance + field. + """ + return self._new_column_from_binary_columns_negative_class( + df=df, + column_name_map=column_name_map, + new_column_name=new_column_name, + dtype=dtype, + ) + + # + # Utilities + # + + # Transform Prelude - Get Applicable Transform Columns + def _resolve_columns_with_dataframe( + self, df: pd.DataFrame, columns: List[str], default_columns: List[str] + ) -> List[str]: + """ + Internal utility function. Uses set logic to ensure + requested columns are available within the target + pd.DataFrame. + """ + available_columns, requested_columns = set(df.columns), set(columns) + resolved_columns = [] + + if len(requested_columns) == 0: + self.logger.warn( + f"Unexpected Transform – columns parameter has no values. Defaulting to all df.columns" + ) + resolved_columns = default_columns + elif len(available_columns & requested_columns) == 0: + self.logger.warn( + f"Unexpected Transform – none of the requested columns were found in df.columns. 
Defaulting to all df.columns" + ) + resolved_columns = default_columns + elif len(requested_columns - available_columns) > 0: + self.logger.warn( + f"Unexpected Transform – df.columns missing values present in columns parameter: {', '.join([*requested_columns - available_columns])}. Continuing with union." + ) + resolved_columns = [*(available_columns & requested_columns)] + else: + resolved_columns = columns + + return resolved_columns + + # Extract REDCap Type Metadata + def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: + """ + Extracts REDCap field name, type, and options (the + metadata) for each column in the target pd.DataFrame + """ + + # REDCap Internal Variable Metadata + metadata = [ + {"name": "redcap_data_access_group", "type": "text", "options": {}}, + {"name": "redcap_repeat_instrument", "type": "text", "options": {}}, + {"name": "redcap_repeat_instance", "type": "number", "options": {}}, + ] + + field_types = set(field["field_type"] for field in self.metadata) + complex_types = {"dropdown", "radio", "checkbox"} + binary_types = {"yesno"} + text_types = {"text"} + skip_types = {"file", "calc", "descriptive", "notes"} + + # Get Column Metadata + columns = df.columns.tolist() + for field in sorted(self.metadata, key=lambda f: f["field_name"]): + if field["field_name"] in columns: + field_type = field["field_type"] + options: dict = {} + if field_type in complex_types: + rgx = self._field_rgx[field_type] + for option in field["select_choices_or_calculations"].split("|"): + k, v = ( + option.split(",")[0], + (",".join(option.split(",")[1:])).strip(), + ) + _k = int(k) if re.match(rgx, k) else str(k) + _v = int(v) if re.match(rgx, v) else str(v) + options[str(_k)] = _v + metadata.append( + { + "name": field["field_name"], + "type": field["field_type"], + "options": options | self.none_map, + } + ) + elif field_type in binary_types: + metadata.append( + { + "name": field["field_name"], + "type": field["field_type"], + "options": 
{"1": "Yes", "0": "No"} | self.none_map, + } + ) + elif field_type in text_types: + metadata.append( + { + "name": field["field_name"], + "type": field["field_type"], + "options": {}, + } + ) + elif field_type in skip_types: + metadata.append( + { + "name": field["field_name"], + "type": field["field_type"], + "options": {}, + } + ) + else: + continue + + return metadata + + # + # Exports + # + + # Export Untransformed (Raw) Reports + def export_raw( + self, path: str = "", separator: str = "\t", filetype: str = ".tsv" + ) -> object: + for report_key, report_object in self.reports.items(): + filename = f"{report_key}_raw{filetype}" + filepath = os.path.join(path, filename) + transformed = report_object["df"] + transformed.to_csv( + filepath, + sep=separator, + quoting=csv.QUOTE_NONNUMERIC, + float_format=self.csv_float_format, + ) + return self + + # Export Transformed Reports + def export_transformed( + self, path: str = "", separator: str = "\t", filetype: str = ".tsv" + ) -> object: + for report_key, report_object in self.reports.items(): + filename = f"{report_key}_transformed{filetype}" + filepath = os.path.join(path, filename) + transformed = report_object["transformed"] + transformed.to_csv( + filepath, + sep=separator, + quoting=csv.QUOTE_NONNUMERIC, + float_format=self.csv_float_format, + ) + return self + + # Export Merged Transforms + def export_merged_transformed( + self, path: str = "", separator: str = "\t", filetype: str = ".tsv" + ) -> object: + filename = f"transformed-merged_redcap-extract{filetype}" + filepath = os.path.join(path, filename) + self.merged.to_csv( + filepath, + sep=separator, + quoting=csv.QUOTE_NONNUMERIC, + float_format=self.csv_float_format, + ) + return self + + +if __name__ == "__main__": + pass +else: + pass diff --git a/poetry.lock b/poetry.lock index c58710fc..7b826e9c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -250,6 +250,47 @@ tests = ["attrs[tests-no-zope]", "zope-interface"] tests-mypy = ["mypy (>=1.6)", 
"pytest-mypy-plugins"] tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +[[package]] +name = "azure-core" +version = "1.30.1" +description = "Microsoft Azure Core Library for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"}, + {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"}, +] + +[package.dependencies] +requests = ">=2.21.0" +six = ">=1.11.0" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["aiohttp (>=3.0)"] + +[[package]] +name = "azure-storage-blob" +version = "12.19.1" +description = "Microsoft Azure Blob Storage Client Library for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-storage-blob-12.19.1.tar.gz", hash = "sha256:13e16ba42fc54ac2c7e8f976062173a5c82b9ec0594728e134aac372965a11b0"}, + {file = "azure_storage_blob-12.19.1-py3-none-any.whl", hash = "sha256:c5530dc51c21c9564e4eb706cd499befca8819b10dd89716d3fc90d747556243"}, +] + +[package.dependencies] +azure-core = ">=1.28.0,<2.0.0" +cryptography = ">=2.1.4" +isodate = ">=0.6.1" +typing-extensions = ">=4.3.0" + +[package.extras] +aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] + [[package]] name = "babel" version = "2.14.0" @@ -1448,6 +1489,21 @@ widgetsnbextension = ">=4.0.10,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = 
"sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + [[package]] name = "isoduration" version = "20.11.0" @@ -2664,8 +2720,6 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -3097,6 +3151,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3104,8 +3159,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3122,6 +3184,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3129,6 +3192,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -4169,4 +4233,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "79f71b90af5571b195f90783411304f0617b43904fc28186b5b3cd37b073b102" +content-hash = "cbed924469a5f0bbc91cec5bd6f054f02434a02bcc481059fffab9481583adc0" diff --git a/pyproject.toml b/pyproject.toml index 5bd6da1c..e1dffbd8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,6 +80,7 @@ pyfairdatatools = "0.1.3" pandas = "^2.2.0" numpy = "^1.26.4" pycap = "^2.6.0" +azure-storage-blob = "^12.19.1" [tool.poetry.group.dev.dependencies] From 11f58294d79ff6801508a44997915fffcbded452 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Sun, 7 Apr 2024 18:33:26 -0700 Subject: [PATCH 458/505] =?UTF-8?q?feat:=20=E2=9C=A8=20REDCap=20release=20?= =?UTF-8?q?ETL?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 17 ++- apis/dashboard.py | 127 ++++++++---------- modules/etl/config/aireadi_config.py | 25 ++-- .../etl/transforms/redcap_live_transform.py | 26 ++-- .../transforms/redcap_release_transform.py | 46 ++----- 5 files 
changed, 108 insertions(+), 133 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 36471a8b..b81f86d3 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -292,10 +292,18 @@ def authentication(): def authorization(): """it checks whether url is allowed to be reached to specific routes""" # white listed routes - public_routes = ["/auth", "/docs", "/echo", "/swaggerui", "/swagger.json", "/utils"] - - for route in public_routes: - if request.path.startswith(route): + public_route_patterns = [ + r"^/auth.*", + r"^/docs", + r"^/echo", + r"^/swaggerui.*", + r"^/swagger.json", + r"^/utils.*", + r"^/study/(?P[0-9a-f]{8}\-[0-9a-f]{4}\-4[0-9a-f]{3}\-[89ab][0-9a-f]{3}\-[0-9a-f]{12})/dashboard/public", + ] + + for route_pattern in public_route_patterns: + if bool(re.search(route_pattern, request.path)): return if g.user: return @@ -372,6 +380,7 @@ def is_granted(permission: str, study=None): "study_metadata", "version", "dataset_metadata", + "update_dashboard", ], "viewer": ["viewer", "view"], } diff --git a/apis/dashboard.py b/apis/dashboard.py index b588e4a3..061abf5a 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -229,7 +229,6 @@ def get(self, study_id: str): redcap_project_dashboard.to_dict() for redcap_project_dashboard in redcap_project_dashboards_query ] - print(redcap_project_dashboards) return redcap_project_dashboards, 201 @api.doc("Create a new study dashboard") @@ -351,28 +350,6 @@ def post(self, study_id: str): return connect_redcap_project_dashboard, 201 -@api.route("/study//dashboard/public") -class RedcapProjectDashboardsPublic(Resource): - @api.doc("Get public study dashboards") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(redcap_project_dashboard_model, as_list=True) - def get(self, study_id: str): - """Get all REDCap project dashboards""" - study = model.db.session.query(model.Study).get(study_id) - redcap_project_dashboards_query = 
model.StudyDashboard.query.filter_by( - study=study - ) - redcap_project_dashboards: List[Dict[str, Any]] = [ - redcap_project_dashboard.to_dict() - for redcap_project_dashboard in redcap_project_dashboards_query - ] - public_redcap_project_dashboards: List[Dict[str, Any]] = list( - filter(lambda dashboard: dashboard["public"], redcap_project_dashboards) - ) - return public_redcap_project_dashboards, 201 - - @api.route("/study//dashboard//connector") class RedcapProjectDashboardConnector(Resource): @api.doc("Get a study dashboard connector") @@ -410,16 +387,17 @@ def get(self, study_id: str, dashboard_id: str): if not is_granted("view", study): return "Access denied, you can not view this dashboard", 403 - # Retrieve Dashboard Redis Cache - cached_redcap_project_dashboard = caching.cache.get( - f"$study_id#{study_id}$dashboard_id#{dashboard_id}" - ) - - if cached_redcap_project_dashboard is not None: - return cached_redcap_project_dashboard, 201 + # Retrieve Dashboard Redis Cache if Available + # cached_redcap_project_dashboard = caching.cache.get( + # f"$study_id#{study_id}$dashboard_id#{dashboard_id}" + # ) + # if cached_redcap_project_dashboard is not None: + # return cached_redcap_project_dashboard, 201 + # Get Base Transform Config for ETL - Live transformConfig = redcapLiveTransformConfig + # Query Project Dashboard by ID redcap_project_dashboard_query: Any = model.db.session.query( model.StudyDashboard ).get(dashboard_id) @@ -465,11 +443,12 @@ def get(self, study_id: str, dashboard_id: str): ], ) - # Structure REDCap ETL Config - redcap_etl_config = { - "redcap_api_url": redcap_project_view["api_url"], - "redcap_api_key": redcap_project_view["api_key"], - } | transformConfig + # Set REDCap API Config + transformConfig["redcap_api_url"] = redcap_project_view["api_url"] + transformConfig["redcap_api_key"] = redcap_project_view["api_key"] + + # Finalize ETL Config + redcap_etl_config = transformConfig redcapTransform = RedcapLiveTransform(redcap_etl_config) @@ 
-499,7 +478,7 @@ def get(self, study_id: str, dashboard_id: str): f"$study_id#{study_id}$dashboard_id#{dashboard_id}", redcap_project_dashboard, ) - print("Live Transform") + return redcap_project_dashboard, 201 @api.doc("Update a study dashboard") @@ -647,43 +626,51 @@ def delete(self, study_id: str, dashboard_id: str): return 204 -@api.route("/study//dashboard//release") -class RedcapProjectDashboardRelease(Resource): - @api.doc("Get a study dashboard") +@api.route("/study//dashboard/public") +class RedcapProjectDashboardPublic(Resource): + @api.doc("Get the public study dashboard") @api.response(200, "Success") @api.response(400, "Validation Error") @api.marshal_with(redcap_project_dashboard_model) - def get(self, study_id: str, dashboard_id: str): + def get(self, study_id: str): """Get REDCap project dashboard""" model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) - if not is_granted("view", study): - return "Access denied, you can not view this dashboard", 403 + # if not is_granted("view", study): + # return "Access denied, you can not view this dashboard", 403 - # Retrieve Dashboard Redis Cache - cached_redcap_project_dashboard = caching.cache.get( - f"$study_id#{study_id}$dashboard_id#{dashboard_id}#release" + # Get Dashboard + redcap_project_dashboards_query = model.StudyDashboard.query.filter_by( + study=study, public=True ) - - if cached_redcap_project_dashboard is not None: - return cached_redcap_project_dashboard, 201 - + # List of Dashboards + redcap_project_dashboards: List[Dict[str, Any]] = [ + redcap_project_dashboard.to_dict() + for redcap_project_dashboard in redcap_project_dashboards_query + ] + # There Should Only Be One, Pop it From The List If It's There + if len(redcap_project_dashboards) > 0: + redcap_project_dashboard = redcap_project_dashboards.pop() + else: + return "No public dashboard found", 404 + + # Public Dashboard ID + dashboard_id = redcap_project_dashboard["id"] + + # Retrieve Dashboard Redis Cache 
if Available + # cached_redcap_project_dashboard = caching.cache.get( + # f"$study_id#{study_id}$dashboard_id#{dashboard_id}#public" + # ) + # if cached_redcap_project_dashboard is not None: + # return cached_redcap_project_dashboard, 201 + + # + # No Cache, Do ETL + # + + # Get Base Transform Config for ETL - Release transformConfig = redcapReleaseTransformConfig - redcap_project_dashboard_query: Any = model.db.session.query( - model.StudyDashboard - ).get(dashboard_id) - redcap_project_dashboard: Dict[ - str, Any - ] = redcap_project_dashboard_query.to_dict() - - # Get REDCap Project - redcap_id = redcap_project_dashboard["redcap_id"] - redcap_project_view_query: Any = model.db.session.query(model.StudyRedcap).get( - redcap_id - ) - redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() - # Set report_ids for ETL report_keys = [] for report in redcap_project_dashboard["reports"]: @@ -703,7 +690,7 @@ def get(self, study_id: str, dashboard_id: str): for report in redcapLiveTransformConfig["reports"] if report["key"] in report_keys ] - print(transformConfig["reports"]) + # Set Post Transform Merge index_columns, post_transform_merges = transformConfig["post_transform_merge"] transformConfig["post_transform_merge"] = ( @@ -715,12 +702,10 @@ def get(self, study_id: str, dashboard_id: str): ], ) - # Structure REDCap ETL Config - redcap_etl_config = { - "redcap_api_url": redcap_project_view["api_url"], - "redcap_api_key": redcap_project_view["api_key"], - } | transformConfig + # Finalize ETL Config + redcap_etl_config = transformConfig + # Execute REDCap Release ETL redcapTransform = RedcapReleaseTransform(redcap_etl_config) # Execute Dashboard Module Transforms @@ -746,8 +731,8 @@ def get(self, study_id: str, dashboard_id: str): # Create Dashboard Redis Cache caching.cache.set( - f"$study_id#{study_id}$dashboard_id#{dashboard_id}#release", + f"$study_id#{study_id}$dashboard_id#{dashboard_id}#public", redcap_project_dashboard, ) - print("Release 
Transform") + return redcap_project_dashboard, 201 diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index ebadf5c3..cfa493f6 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -125,7 +125,7 @@ phenotypes_column_map: Dict[str, str] = { "mhterm_dm2": "Type II Diabetes", "mhterm_predm": "Prediabetes", - "mh_a1c": "Elevated A1C", + # "mh_a1c": "Elevated A1C", } # sex_column_map: Dict[str, str] = { @@ -136,9 +136,6 @@ # "777": "Prefer not to say", # } -# race_column_map: Dict[str, str] = { - -# } treatments_column_map: Dict[str, str] = { "cmtrt_a1c": "Oral Medication", @@ -155,7 +152,7 @@ ("participant-list", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), ("participant-values", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), ("instrument-status", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), - ("repeat-instrument", {"on": index_columns, "how": "outer", "suffixes": (None, '_merged')}), + ("repeat-instrument", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), ] # @@ -170,6 +167,8 @@ "filepath": "AI-READI/REDCap", "filename": "Redcap_project_metadata.json", }, + "redcap_api_url": "", + "redcap_api_key": "", "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] { "key": "participant-list", @@ -179,7 +178,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [], @@ -192,7 +191,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [ @@ -260,7 +259,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [ @@ 
-280,7 +279,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [ @@ -331,7 +330,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [], @@ -344,7 +343,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [ @@ -412,7 +411,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [ @@ -432,7 +431,7 @@ "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", "report_id": "", }, "transforms": [ diff --git a/modules/etl/transforms/redcap_live_transform.py b/modules/etl/transforms/redcap_live_transform.py index 5e02a898..8079547a 100644 --- a/modules/etl/transforms/redcap_live_transform.py +++ b/modules/etl/transforms/redcap_live_transform.py @@ -11,18 +11,18 @@ class RedcapLiveTransform(object): def __init__(self, config: dict) -> None: + print("REDCap Live Transform") + # # Config # + # Get CWD + self.cwd = os.getcwd() + # REDCap API Config self.redcap_api_url = config["redcap_api_url"] self.redcap_api_key = config["redcap_api_key"] - self.redcap_data_dir = config["redcap_data_dir"] - self.redcap_metadata_config = config["project_metadata"] - - # Set Transform Key - self.key = config["key"] if "key" in config else "redcap-transform" # Data Config self.index_columns = ( @@ -132,22 +132,22 @@ def __init__(self, config: dict) -> None: # Internal Defaults # - Key Assumptions for Transform Functions # – Only Update if REDCap API and/or PyCap Update - self._reports_kwdargs = { + self._default_report_kwdargs = { 
"raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", } # Get & Structure Report - self.logger.info(f"Retrieving REDCap reports") + self.logger.info(f"Retrieving Live REDCap reports") self.reports = {} for report_config in self.reports_configs: # Get Report report_key = report_config["key"] - report_kwdargs = report_config["kwdargs"] | self._reports_kwdargs + report_kwdargs = report_config["kwdargs"] | self._default_report_kwdargs report_transforms = report_config["transforms"] report = self.project.export_report(**report_kwdargs) - + pd.DataFrame(report, dtype = str).to_csv(f"~/Downloads/etl-redcap-export-live-{report_kwdargs['report_id']}") # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], @@ -836,7 +836,7 @@ def export_raw( ) -> object: for report_key, report_object in self.reports.items(): filename = f"{report_key}_raw{filetype}" - filepath = os.path.join(path, filename) + filepath = os.path.join(self.cwd, path, filename) transformed = report_object["df"] transformed.to_csv( filepath, @@ -852,7 +852,7 @@ def export_transformed( ) -> object: for report_key, report_object in self.reports.items(): filename = f"{report_key}_transformed{filetype}" - filepath = os.path.join(path, filename) + filepath = os.path.join(self.cwd, path, filename) transformed = report_object["transformed"] transformed.to_csv( filepath, @@ -867,7 +867,7 @@ def export_merged_transformed( self, path: str = "", separator: str = "\t", filetype: str = ".tsv" ) -> object: filename = f"transformed-merged_redcap-extract{filetype}" - filepath = os.path.join(path, filename) + filepath = os.path.join(self.cwd, path, filename) self.merged.to_csv( filepath, sep=separator, diff --git a/modules/etl/transforms/redcap_release_transform.py b/modules/etl/transforms/redcap_release_transform.py index ca18eec6..ed4f12ff 100644 --- a/modules/etl/transforms/redcap_release_transform.py +++ 
b/modules/etl/transforms/redcap_release_transform.py @@ -12,6 +12,8 @@ class RedcapReleaseTransform(object): def __init__(self, config: dict) -> None: + print("REDCap Release Transform") + # # Config # @@ -19,15 +21,10 @@ def __init__(self, config: dict) -> None: # Get CWD self.cwd = os.getcwd() - # REDCap API Config - self.redcap_api_url = config["redcap_api_url"] - self.redcap_api_key = config["redcap_api_key"] + # REDCap Azure Storage Access Config self.redcap_data_dir = config["redcap_data_dir"] self.redcap_metadata_config = config["project_metadata"] - # Set Transform Key - self.key = config["key"] if "key" in config else "redcap-transform" - # Data Config self.index_columns = ( config["index_columns"] if "index_columns" in config else ["record_id"] @@ -115,20 +112,6 @@ def __init__(self, config: dict) -> None: "0": "Incomplete", "": self.missing_value_generic, } - # self.boolean_map = { - # "Yes": 1.0, - # "1.0": 1.0, - # "true": 1.0, - # "True": 1.0, - # 1: 1.0, - # "No": 0.0, - # "0.0": 0.0 - # "false": 0.0, - # "False": 0.0, - # 0: 0.0, - # "": self.missing_value_generic, - # "NaN": self.missing_value_generic, - # } self.logger.info(f"Initialized") @@ -138,7 +121,6 @@ def __init__(self, config: dict) -> None: # Initialize PyCap Objects self.logger.info(f"Retrieving REDCap project data") - self.project = Project(self.redcap_api_url, self.redcap_api_key) # Load Release REDCap Project Metadata self.metadata = self.get_stored_project_metadata( @@ -154,32 +136,33 @@ def __init__(self, config: dict) -> None: # Internal Defaults # - Key Assumptions for Transform Functions # – Only Update if REDCap API and/or PyCap Update - self._reports_kwdargs = { + self._default_report_kwdargs = { "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "\t", + "csv_delimiter": "", } # Get & Structure Report - self.logger.info(f"Retrieving REDCap reports") + self.logger.info(f"Retrieving Stored REDCap reports") self.reports = {} 
for report_config in self.reports_configs: # Get Report report_key = report_config["key"] - report_kwdargs = report_config["kwdargs"] | self._reports_kwdargs + report_kwdargs = report_config["kwdargs"] | self._default_report_kwdargs report_transforms = report_config["transforms"] # Load Release REDCap Reports - report = self.get_stored_report( + report = report_dataframe = self.get_stored_report( os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") or "", os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER") or "", f"{report_config['filepath']}/{report_config['filename']}" ) + report.to_csv(f"~/Downloads/etl-redcap-export-release-{report_kwdargs['report_id']}") # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], - "df": pd.read_csv(f"{self.cwd}/{self.redcap_data_dir}/{report_config['filepath']}/{report_config['filename']}", delimiter = self._reports_kwdargs["csv_delimiter"], dtype = str), + "df": report_dataframe, "transforms": report_transforms, "transformed": None, "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), @@ -230,7 +213,7 @@ def get_stored_report(self, connection_string: str, container_name: str, blob_pa blob_client = container_client.get_blob_client(blob_path) # Get Blob - df = pd.read_csv(blob_client.download_blob()) + df = pd.read_csv(blob_client.download_blob(), dtype = str) return df def get_report_id(self, report_key: str) -> str: @@ -888,7 +871,7 @@ def export_raw( ) -> object: for report_key, report_object in self.reports.items(): filename = f"{report_key}_raw{filetype}" - filepath = os.path.join(path, filename) + filepath = os.path.join(self.cwd, path, filename) transformed = report_object["df"] transformed.to_csv( filepath, @@ -904,7 +887,7 @@ def export_transformed( ) -> object: for report_key, report_object in self.reports.items(): filename = f"{report_key}_transformed{filetype}" - filepath = os.path.join(path, filename) + filepath = os.path.join(self.cwd, path, filename) 
transformed = report_object["transformed"] transformed.to_csv( filepath, @@ -919,7 +902,7 @@ def export_merged_transformed( self, path: str = "", separator: str = "\t", filetype: str = ".tsv" ) -> object: filename = f"transformed-merged_redcap-extract{filetype}" - filepath = os.path.join(path, filename) + filepath = os.path.join(self.cwd, path, filename) self.merged.to_csv( filepath, sep=separator, @@ -928,7 +911,6 @@ def export_merged_transformed( ) return self - if __name__ == "__main__": pass else: From 70eb4bd334b9ec89ed662444d30925380eb2c12e Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 9 Apr 2024 14:28:48 -0700 Subject: [PATCH 459/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20minor=20fix=20to?= =?UTF-8?q?=20aireadi=5Fconfig.py?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .env.example | 3 +++ modules/etl/config/aireadi_config.py | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.env.example b/.env.example index f3dd1d03..70dc658d 100644 --- a/.env.example +++ b/.env.example @@ -11,3 +11,6 @@ FAIRHUB_CACHE_PORT=6379 FAIRHUB_CACHE_TYPE=RedisCache FAIRHUB_CACHE_DB=0 FAIRHUB_CACHE_URL=redis://127.0.0.1:6379 + +FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION="azure.storage.account.connection.string" +FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER="azure-stroage-container" diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index cfa493f6..7ab320a5 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -4262,15 +4262,15 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["px_racial_ethnic_discrimination_survey_complete",], + "groups": ["px_racial_ethnic_discrimination_survey_complete"], "value": "record_id", "func": "count", } ], "accessors": { "filterby": { - "name": "Count (N)", - "field": "record_id", + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": 
"px_racial_ethnic_discrimination_survey_complete", "missing_value": missing_value_generic, "astype": str, }, From b7660dd39c157d836eee4c526c50d6b59dffb3d8 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 15 Apr 2024 13:27:15 -0700 Subject: [PATCH 460/505] chore: reset From eb1f287787a295fb732aa7ef6b8763c5db1edfae Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 15 Apr 2024 13:37:51 -0700 Subject: [PATCH 461/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20add=20build=20for?= =?UTF-8?q?=20main=20pushes?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/build-and-deploy-to-main.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/build-and-deploy-to-main.yml b/.github/workflows/build-and-deploy-to-main.yml index 3bf82164..14c3c3e1 100644 --- a/.github/workflows/build-and-deploy-to-main.yml +++ b/.github/workflows/build-and-deploy-to-main.yml @@ -3,6 +3,9 @@ name: (main) Build and push api image to Azure Container Registry on: + push: + branches: + - main pull_request: types: [closed] branches: From 216df4ae6859db8a51f2ae26f55de13cbd48adb2 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Mon, 15 Apr 2024 13:50:41 -0700 Subject: [PATCH 462/505] =?UTF-8?q?=F0=9F=91=B7=20ci:=20disable=20unused?= =?UTF-8?q?=20workflow?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../workflows/staging-build-and-deploy.yml | 86 +++++++++---------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/.github/workflows/staging-build-and-deploy.yml b/.github/workflows/staging-build-and-deploy.yml index 5acd3026..cacd1338 100644 --- a/.github/workflows/staging-build-and-deploy.yml +++ b/.github/workflows/staging-build-and-deploy.yml @@ -1,48 +1,48 @@ -on: - push: - branches: - - staging - pull_request: - types: [opened, synchronize, reopened, closed] - branches: - - staging +on: + # push: + # branches: + # - staging + # pull_request: 
+ # types: [opened, synchronize, reopened, closed] + # branches: + # - staging workflow_dispatch: - + name: Staging_Container_Workflow jobs: - build-and-deploy: - runs-on: ubuntu-latest - steps: - # checkout the repo - - name: 'Checkout GitHub Action' - uses: actions/checkout@v4 - with: - ref: staging - - - name: 'Login via Azure CLI' - uses: azure/login@v1 - with: - creds: ${{ secrets.AZURE_CREDENTIALS }} - - - name: 'Build and push image' - uses: azure/docker-login@v1 - with: - login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} - username: ${{ secrets.REGISTRY_USERNAME }} - password: ${{ secrets.REGISTRY_PASSWORD }} - - run: | - docker build . -t ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} - docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} + build-and-deploy: + runs-on: ubuntu-latest + steps: + # checkout the repo + - name: "Checkout GitHub Action" + uses: actions/checkout@v4 + with: + ref: staging + + - name: "Login via Azure CLI" + uses: azure/login@v1 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: "Build and push image" + uses: azure/docker-login@v1 + with: + login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} + username: ${{ secrets.REGISTRY_USERNAME }} + password: ${{ secrets.REGISTRY_PASSWORD }} + - run: | + docker build . 
-t ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} + docker push ${{ secrets.REGISTRY_LOGIN_SERVER }}/fairhub-flask-api:staging${{ github.sha }} - - name: 'Deploy to Azure Container Instances' - uses: 'azure/aci-deploy@v1' - with: - resource-group: ${{ secrets.RESOURCE_GROUP }} - dns-name-label: ${{ secrets.RESOURCE_GROUP }}${{ github.run_number }} - image: ${{ secrets.REGISTRY_LOGIN_SERVER }}/sampleapp:${{ github.sha }} - registry-login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} - registry-username: ${{ secrets.REGISTRY_USERNAME }} - registry-password: ${{ secrets.REGISTRY_PASSWORD }} - name: fairhub-flask-api-staging - location: 'west us' + - name: "Deploy to Azure Container Instances" + uses: "azure/aci-deploy@v1" + with: + resource-group: ${{ secrets.RESOURCE_GROUP }} + dns-name-label: ${{ secrets.RESOURCE_GROUP }}${{ github.run_number }} + image: ${{ secrets.REGISTRY_LOGIN_SERVER }}/sampleapp:${{ github.sha }} + registry-login-server: ${{ secrets.REGISTRY_LOGIN_SERVER }} + registry-username: ${{ secrets.REGISTRY_USERNAME }} + registry-password: ${{ secrets.REGISTRY_PASSWORD }} + name: fairhub-flask-api-staging + location: "west us" From 93d4ac91c27018b9a647dd32992329aebbde244a Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 16 Apr 2024 11:29:28 -0700 Subject: [PATCH 463/505] =?UTF-8?q?=F0=9F=97=83=EF=B8=8F=20chore:=20add=20?= =?UTF-8?q?email=20to=20bypass?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- apis/authentication.py | 1 + 1 file changed, 1 insertion(+) diff --git a/apis/authentication.py b/apis/authentication.py index b81f86d3..10407d01 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -66,6 +66,7 @@ def post(self): "bpatel@fairhub.io", "sanjay@fairhub.io", "aydan@fairhub.io", + "cordier@ohsu.edu", ] if data["email_address"] not in bypassed_emails: From 9be1c676e30c0bf137a85ede5a114ade54c3eacf Mon Sep 17 00:00:00 2001 From: Greenstick Date: Tue, 16 
Apr 2024 13:14:08 -0700 Subject: [PATCH 464/505] =?UTF-8?q?fix:=20=F0=9F=90=9B=20remove=20CSV=20sa?= =?UTF-8?q?ving=20in=20REDCap=20ETL?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- modules/etl/transforms/redcap_live_transform.py | 1 - modules/etl/transforms/redcap_release_transform.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/modules/etl/transforms/redcap_live_transform.py b/modules/etl/transforms/redcap_live_transform.py index 8079547a..47afdc24 100644 --- a/modules/etl/transforms/redcap_live_transform.py +++ b/modules/etl/transforms/redcap_live_transform.py @@ -147,7 +147,6 @@ def __init__(self, config: dict) -> None: report_kwdargs = report_config["kwdargs"] | self._default_report_kwdargs report_transforms = report_config["transforms"] report = self.project.export_report(**report_kwdargs) - pd.DataFrame(report, dtype = str).to_csv(f"~/Downloads/etl-redcap-export-live-{report_kwdargs['report_id']}") # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], diff --git a/modules/etl/transforms/redcap_release_transform.py b/modules/etl/transforms/redcap_release_transform.py index ed4f12ff..552e549d 100644 --- a/modules/etl/transforms/redcap_release_transform.py +++ b/modules/etl/transforms/redcap_release_transform.py @@ -157,8 +157,6 @@ def __init__(self, config: dict) -> None: os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER") or "", f"{report_config['filepath']}/{report_config['filename']}" ) - - report.to_csv(f"~/Downloads/etl-redcap-export-release-{report_kwdargs['report_id']}") # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], From 4f7fc0da4b5dcc2e9298b9c702ef0f6b938b36c6 Mon Sep 17 00:00:00 2001 From: Aydan Gasimova <62059163+Aydawka@users.noreply.github.com> Date: Wed, 3 Jul 2024 09:20:27 -0700 Subject: [PATCH 465/505] feat: add sessions (#56) * feat: add session table * feat: add session table * chore: add/remove session in 
logout/change password * chore: add session in login * fix: session removal logic * chore: convert user removal into function * style: format * feat: add logout test functions * feat: add logout test functions * style: format * style: format * style: format * fix: expiration nullability * fix: alembic table removed * style: format * style: format * fix: change token storage for sessions * style: format * fix: session remove for pass change * fix: remove alembic table * style: format * fix: check session * fix: session authentication * fix: change email verified type * fix: session extension time * chore: fix session time * style: format * fix: token session * fix: destroy schema --- .../3ffefbd9c03b_email_verified_type.py | 33 ++++++ .../9698369d7a8c_create_session_table.py | 27 +++++ apis/authentication.py | 68 ++++++++---- app.py | 48 ++++---- model/__init__.py | 2 + model/session.py | 36 ++++++ model/user.py | 1 + tests/conftest.py | 6 + tests/functional/test_user.py | 104 +++++++++++++++++- 9 files changed, 280 insertions(+), 45 deletions(-) create mode 100644 alembic/versions/3ffefbd9c03b_email_verified_type.py create mode 100644 alembic/versions/9698369d7a8c_create_session_table.py create mode 100644 model/session.py diff --git a/alembic/versions/3ffefbd9c03b_email_verified_type.py b/alembic/versions/3ffefbd9c03b_email_verified_type.py new file mode 100644 index 00000000..b3792d95 --- /dev/null +++ b/alembic/versions/3ffefbd9c03b_email_verified_type.py @@ -0,0 +1,33 @@ +"""email verified type + +Revision ID: 3ffefbd9c03b +Revises: 9698369d7a8c +Create Date: 2024-07-01 12:28:02.596192 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '3ffefbd9c03b' +down_revision: Union[str, None] = '9698369d7a8c' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("user") as batch_op: + batch_op.alter_column( + "email_verified", + type_=sa.Boolean(), + postgresql_using="email_verified::boolean", + ) + + + + + + diff --git a/alembic/versions/9698369d7a8c_create_session_table.py b/alembic/versions/9698369d7a8c_create_session_table.py new file mode 100644 index 00000000..b092a8b4 --- /dev/null +++ b/alembic/versions/9698369d7a8c_create_session_table.py @@ -0,0 +1,27 @@ +"""create session table + +Revision ID: 9698369d7a8c +Revises: +Create Date: 2024-06-13 09:59:17.605666 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = '9698369d7a8c' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'session', + sa.Column('id', sa.CHAR(36), primary_key=True), + sa.Column('user_id', sa.CHAR(36), sa.ForeignKey("user.id"), nullable=False), + sa.Column('expires_at', sa.BigInteger, nullable=False) + ) + diff --git a/apis/authentication.py b/apis/authentication.py index 10407d01..c6d11199 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -9,13 +9,13 @@ import uuid from datetime import timezone from typing import Any, Union +import time import jwt from email_validator import EmailNotValidError, validate_email from flask import g, make_response, request from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate - import model api = Namespace("Authentication", description="Authentication paths", path="/") @@ -233,24 +233,29 @@ def validate_is_valid_email(instance): # If not testing, directly 
use the 'config' module config = config_module + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=180 + ) + jti = str(uuid.uuid4()) encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=180), # noqa: W503 - "jti": str(uuid.uuid4()), + "exp": expired_in, + "jti": jti, + }, # noqa: W503 config.FAIRHUB_SECRET, algorithm="HS256", ) - resp = make_response(user.to_dict()) resp.set_cookie( "token", encoded_jwt_code, secure=True, httponly=True, samesite="None" ) - resp.status_code = 200 - + g.token = jti + added_session = model.Session.from_data(jti, expired_in.timestamp(), user) + model.db.session.add(added_session) + model.db.session.commit() return resp @@ -258,7 +263,7 @@ def authentication(): """it authenticates users to a study, sets access and refresh token. In addition, it handles error handling of expired token and non existed users""" g.user = None - + g.token = None if "token" not in request.cookies: return token: str = ( @@ -286,7 +291,20 @@ def authentication(): token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) if token_blacklist: return + # decode user user = model.User.query.get(decoded["user"]) + # decode session + session = model.Session.query.get(decoded["jti"]) + if not session: + g.user = None + g.token = None + return + + if session.expires_at < time.time(): + g.user = None + g.token = None + return + g.token = decoded["jti"] g.user = user @@ -397,6 +415,7 @@ class Logout(Resource): @api.response(400, "Validation Error") def post(self): """simply logges out user from the system""" + resp = make_response() resp.set_cookie( "token", @@ -407,6 +426,16 @@ def post(self): expires=datetime.datetime.now(timezone.utc), ) resp.status_code = 204 + + if g.user and g.token: + remove_session = ( + model.Session.query + .filter(model.Session.id == g.token) + .first() + ) + if remove_session: + model.db.session.delete(remove_session) + 
model.db.session.commit() return resp @@ -473,20 +502,21 @@ def confirm_new_password(instance): data: Union[Any, dict] = request.json user = model.User.query.get(g.user.id) + user.set_password(data["new_password"]) + model.db.session.commit() + session_logout() return "Password updated successfully", 200 -# @api.route("/auth/current-users") -# class CurrentUsers(Resource): -# """function is used to see all logged users in -# the system. For now, it is used for testing purposes""" +def session_logout(): + if g.user and g.token: + remove_sessions = model.Session.query.filter( + model.Session.user_id == g.user.id + ).all() -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def get(self): -# """returns all logged users in the system""" -# if not g.user: -# return None -# return g.user.to_dict() + for session in remove_sessions: + model.db.session.delete(session) + model.db.session.commit() + # return "Sessions are removed successfully", 200 diff --git a/app.py b/app.py index 00f93415..3735b7e8 100644 --- a/app.py +++ b/app.py @@ -12,7 +12,7 @@ from flask_bcrypt import Bcrypt from flask_cors import CORS from growthbook import GrowthBook -from sqlalchemy import MetaData, inspect +from sqlalchemy import MetaData, inspect, text from sqlalchemy.ext.compiler import compiles from sqlalchemy.schema import DropTable from waitress import serve @@ -77,7 +77,8 @@ def create_app(config_module=None, loglevel="INFO"): caching.cache.init_app(app) cors_origins = [ - "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string + "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", + # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://staging.app.fairhub.io", "https://app.fairhub.io", "https://staging.fairhub.io", @@ -125,16 +126,6 @@ def create_schema(): with engine.begin(): model.db.create_all() - 
@app.cli.command("destroy-schema") - def destroy_schema(): - """Create the database schema.""" - # If DB is Azure, Skip - if config.FAIRHUB_DATABASE_URL.find("azure") > -1: - return - engine = model.db.session.get_bind() - with engine.begin(): - model.db.drop_all() - @app.cli.command("cycle-schema") def cycle_schema(): """Destroy then re-create the database schema.""" @@ -159,6 +150,17 @@ def list_schemas(): for schema_name in schema_names: print(schema_name) + @app.cli.command("destroy-schema") + def destroy_schema(): + """Create the database schema.""" + # If DB is Azure, Skip + if config.FAIRHUB_DATABASE_URL.find("azure") > -1: + return + engine = model.db.session.get_bind() + with engine.begin() as conn: + model.db.drop_all() + conn.execute(text("DROP TABLE IF EXISTS alembic_version")) # type: ignore + @app.cli.command("inspect-schema") @click.argument("schema") def inspect_schema(schema=None): @@ -221,7 +223,7 @@ def on_after_request(resp): if request.path.startswith(route): return resp - if "token" not in request.cookies: + if "token" not in request.cookies or not g.token: return resp token: str = request.cookies.get("token") or "" # type: ignore @@ -254,15 +256,22 @@ def on_after_request(resp): if token_blacklist: resp.delete_cookie("token") return resp + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( minutes=180 ) - new_token = jwt.encode( - {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, - config.FAIRHUB_SECRET, - algorithm="HS256", - ) - resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") + session = model.Session.query.get(g.token) + session_expires_at = datetime.datetime.fromtimestamp(session.expires_at, timezone.utc) + + if expired_in - session_expires_at < datetime.timedelta(minutes=90): + + new_token = jwt.encode( + {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, + config.FAIRHUB_SECRET, + algorithm="HS256", + ) + resp.set_cookie("token", 
new_token, secure=True, httponly=True, samesite="None") + session.expires_at = expired_in.timestamp() app.logger.info("after request") app.logger.info(request.headers.get("Origin")) @@ -278,7 +287,6 @@ def on_after_request(resp): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" app.logger.info(resp.headers) - return resp @app.errorhandler(ValidationException) diff --git a/model/__init__.py b/model/__init__.py index 33959375..204f4568 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -48,6 +48,7 @@ from .user import User from .user_details import UserDetails from .version import Version +from .session import Session from .version_readme import VersionReadme __all__ = [ @@ -102,4 +103,5 @@ "UserDetails", "Notification", "VersionReadme", + "Session", ] diff --git a/model/session.py b/model/session.py new file mode 100644 index 00000000..e26ea7e2 --- /dev/null +++ b/model/session.py @@ -0,0 +1,36 @@ +from . import User + +from .db import db + + +class Session(db.Model): # type: ignore + def __init__(self, id, user: User): # pylint: disable=redefined-builtin + self.id = id + self.user = user + + __tablename__ = "session" + id = db.Column(db.CHAR(36), primary_key=True) + expires_at = db.Column(db.BigInteger, nullable=False) + + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + + user = db.relationship( + "User", + back_populates="session", + ) + + def to_dict(self): + return { + "id": self.id, + "expires_at": self.expires_at, + "user_id": self.user_id, + } + + @staticmethod + def from_data(id, expires_at, user: User): # pylint: disable=redefined-builtin + session = Session(id, user) + session.update(expires_at) + return session + + def update(self, expires_at): + self.expires_at = expires_at diff --git a/model/user.py b/model/user.py index 9e741efc..c27a1419 100644 --- a/model/user.py +++ b/model/user.py @@ -29,6 +29,7 @@ def __init__(self, password): user_details = 
db.relationship("UserDetails", uselist=False, back_populates="user") token_blacklist = db.relationship("TokenBlacklist", back_populates="user") notification = db.relationship("Notification", back_populates="user") + session = db.relationship("Session", back_populates="user") def to_dict(self): return { diff --git a/tests/conftest.py b/tests/conftest.py index 96c63dff..b2717e01 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -307,6 +307,12 @@ def clients(flask_app): ) assert response.status_code == 200 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 5 + yield _logged_in_client, _admin_client, _editor_client, _viewer_client ctx.pop() diff --git a/tests/functional/test_user.py b/tests/functional/test_user.py index d325e713..b4202ea8 100644 --- a/tests/functional/test_user.py +++ b/tests/functional/test_user.py @@ -1,14 +1,15 @@ -"""Tests for user settings""" - +from model.db import db # ------------------- Password Change ------------------- # + + def test_post_password_change(clients): """ Given a Flask application configured for testing WHEN the '/auth/password/change' endpoint is requested (PUT) THEN check that the response is valid and the password is changed """ - _logged_in_client = clients[0] + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients response = _logged_in_client.post( "/auth/password/change", @@ -18,8 +19,40 @@ def test_post_password_change(clients): "old_password": "Testingyeshello11!", }, ) + a_response = _admin_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + e_response = _editor_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + 
"old_password": "Testingyeshello11!", + }, + ) + v_response = _viewer_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) assert response.status_code == 200 + assert a_response.status_code == 200 + assert e_response.status_code == 200 + assert v_response.status_code == 200 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 0 def test_post_password_login_invalid_old_password(clients): @@ -28,7 +61,7 @@ def test_post_password_login_invalid_old_password(clients): WHEN the '/auth/login' endpoint is requested (POST) THEN check that the response is an error when old password is provided """ - _logged_in_client = clients[0] + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients response = _logged_in_client.post( "/auth/login", json={ @@ -36,8 +69,31 @@ def test_post_password_login_invalid_old_password(clients): "password": "Testingyeshello11!", }, ) - + a_response = _admin_client.post( + "/auth/login", + json={ + "email_address": "admin@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + e_response = _editor_client.post( + "/auth/login", + json={ + "email_address": "editor@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + v_response = _viewer_client.post( + "/auth/login", + json={ + "email_address": "viewer@fairhub.io", + "password": "Testingyeshello11!", + }, + ) assert response.status_code == 401 + assert a_response.status_code == 401 + assert e_response.status_code == 401 + assert v_response.status_code == 401 def test_post_login_new_password(clients): @@ -54,5 +110,41 @@ def test_post_login_new_password(clients): "password": "Updatedpassword4testing!", }, ) - assert response.status_code == 200 + meta = db.metadata + for table in 
reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 1 + + +def test_post_logout(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/login' endpoint is requested (POST) + THEN check that the response is valid when new password is provided + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + + response = _logged_in_client.post( + "/auth/logout" + ) + a_response = _admin_client.post( + "/auth/logout" + ) + e_response = _editor_client.post( + "/auth/logout" + ) + v_response = _viewer_client.post( + "/auth/logout" + ) + + assert response.status_code == 204 + assert a_response.status_code == 204 + assert e_response.status_code == 204 + assert v_response.status_code == 204 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 0 From b027a2f60a647e2dd7ed06477cfaf401b0daefd8 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Fri, 26 Jul 2024 13:58:30 -0700 Subject: [PATCH 466/505] chore: rebuild image From 45e4f5cccdc03f82dd51c2d841a4a45ad8f6859f Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 1 Aug 2024 13:01:07 -0700 Subject: [PATCH 467/505] chore: test migration From 8d98dae133cb708f4e259cb7ecba98c96fe77b7c Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Thu, 1 Aug 2024 13:07:15 -0700 Subject: [PATCH 468/505] Revert "feat: add sessions (#56)" (#58) This reverts commit 4f7fc0da4b5dcc2e9298b9c702ef0f6b938b36c6. 
--- .../3ffefbd9c03b_email_verified_type.py | 33 ------ .../9698369d7a8c_create_session_table.py | 27 ----- apis/authentication.py | 68 ++++-------- app.py | 48 ++++---- model/__init__.py | 2 - model/session.py | 36 ------ model/user.py | 1 - tests/conftest.py | 6 - tests/functional/test_user.py | 104 +----------------- 9 files changed, 45 insertions(+), 280 deletions(-) delete mode 100644 alembic/versions/3ffefbd9c03b_email_verified_type.py delete mode 100644 alembic/versions/9698369d7a8c_create_session_table.py delete mode 100644 model/session.py diff --git a/alembic/versions/3ffefbd9c03b_email_verified_type.py b/alembic/versions/3ffefbd9c03b_email_verified_type.py deleted file mode 100644 index b3792d95..00000000 --- a/alembic/versions/3ffefbd9c03b_email_verified_type.py +++ /dev/null @@ -1,33 +0,0 @@ -"""email verified type - -Revision ID: 3ffefbd9c03b -Revises: 9698369d7a8c -Create Date: 2024-07-01 12:28:02.596192 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = '3ffefbd9c03b' -down_revision: Union[str, None] = '9698369d7a8c' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - with op.batch_alter_table("user") as batch_op: - batch_op.alter_column( - "email_verified", - type_=sa.Boolean(), - postgresql_using="email_verified::boolean", - ) - - - - - - diff --git a/alembic/versions/9698369d7a8c_create_session_table.py b/alembic/versions/9698369d7a8c_create_session_table.py deleted file mode 100644 index b092a8b4..00000000 --- a/alembic/versions/9698369d7a8c_create_session_table.py +++ /dev/null @@ -1,27 +0,0 @@ -"""create session table - -Revision ID: 9698369d7a8c -Revises: -Create Date: 2024-06-13 09:59:17.605666 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. 
-revision: str = '9698369d7a8c' -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - op.create_table( - 'session', - sa.Column('id', sa.CHAR(36), primary_key=True), - sa.Column('user_id', sa.CHAR(36), sa.ForeignKey("user.id"), nullable=False), - sa.Column('expires_at', sa.BigInteger, nullable=False) - ) - diff --git a/apis/authentication.py b/apis/authentication.py index c6d11199..10407d01 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -9,13 +9,13 @@ import uuid from datetime import timezone from typing import Any, Union -import time import jwt from email_validator import EmailNotValidError, validate_email from flask import g, make_response, request from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate + import model api = Namespace("Authentication", description="Authentication paths", path="/") @@ -233,29 +233,24 @@ def validate_is_valid_email(instance): # If not testing, directly use the 'config' module config = config_module - expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( - minutes=180 - ) - jti = str(uuid.uuid4()) encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": expired_in, - "jti": jti, - + "exp": datetime.datetime.now(timezone.utc) + + datetime.timedelta(minutes=180), # noqa: W503 + "jti": str(uuid.uuid4()), }, # noqa: W503 config.FAIRHUB_SECRET, algorithm="HS256", ) + resp = make_response(user.to_dict()) resp.set_cookie( "token", encoded_jwt_code, secure=True, httponly=True, samesite="None" ) - g.token = jti - added_session = model.Session.from_data(jti, expired_in.timestamp(), user) - model.db.session.add(added_session) - model.db.session.commit() + resp.status_code = 200 + return resp @@ -263,7 +258,7 @@ def authentication(): """it authenticates users to a study, sets access and refresh token. 
In addition, it handles error handling of expired token and non existed users""" g.user = None - g.token = None + if "token" not in request.cookies: return token: str = ( @@ -291,20 +286,7 @@ def authentication(): token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) if token_blacklist: return - # decode user user = model.User.query.get(decoded["user"]) - # decode session - session = model.Session.query.get(decoded["jti"]) - if not session: - g.user = None - g.token = None - return - - if session.expires_at < time.time(): - g.user = None - g.token = None - return - g.token = decoded["jti"] g.user = user @@ -415,7 +397,6 @@ class Logout(Resource): @api.response(400, "Validation Error") def post(self): """simply logges out user from the system""" - resp = make_response() resp.set_cookie( "token", @@ -426,16 +407,6 @@ def post(self): expires=datetime.datetime.now(timezone.utc), ) resp.status_code = 204 - - if g.user and g.token: - remove_session = ( - model.Session.query - .filter(model.Session.id == g.token) - .first() - ) - if remove_session: - model.db.session.delete(remove_session) - model.db.session.commit() return resp @@ -502,21 +473,20 @@ def confirm_new_password(instance): data: Union[Any, dict] = request.json user = model.User.query.get(g.user.id) - user.set_password(data["new_password"]) - model.db.session.commit() - session_logout() return "Password updated successfully", 200 -def session_logout(): - if g.user and g.token: - remove_sessions = model.Session.query.filter( - model.Session.user_id == g.user.id - ).all() +# @api.route("/auth/current-users") +# class CurrentUsers(Resource): +# """function is used to see all logged users in +# the system. 
For now, it is used for testing purposes""" - for session in remove_sessions: - model.db.session.delete(session) - model.db.session.commit() - # return "Sessions are removed successfully", 200 +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def get(self): +# """returns all logged users in the system""" +# if not g.user: +# return None +# return g.user.to_dict() diff --git a/app.py b/app.py index 3735b7e8..00f93415 100644 --- a/app.py +++ b/app.py @@ -12,7 +12,7 @@ from flask_bcrypt import Bcrypt from flask_cors import CORS from growthbook import GrowthBook -from sqlalchemy import MetaData, inspect, text +from sqlalchemy import MetaData, inspect from sqlalchemy.ext.compiler import compiles from sqlalchemy.schema import DropTable from waitress import serve @@ -77,8 +77,7 @@ def create_app(config_module=None, loglevel="INFO"): caching.cache.init_app(app) cors_origins = [ - "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", - # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string + "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://staging.app.fairhub.io", "https://app.fairhub.io", "https://staging.fairhub.io", @@ -126,6 +125,16 @@ def create_schema(): with engine.begin(): model.db.create_all() + @app.cli.command("destroy-schema") + def destroy_schema(): + """Create the database schema.""" + # If DB is Azure, Skip + if config.FAIRHUB_DATABASE_URL.find("azure") > -1: + return + engine = model.db.session.get_bind() + with engine.begin(): + model.db.drop_all() + @app.cli.command("cycle-schema") def cycle_schema(): """Destroy then re-create the database schema.""" @@ -150,17 +159,6 @@ def list_schemas(): for schema_name in schema_names: print(schema_name) - @app.cli.command("destroy-schema") - def destroy_schema(): - """Create the database schema.""" - # If DB is Azure, Skip - if 
config.FAIRHUB_DATABASE_URL.find("azure") > -1: - return - engine = model.db.session.get_bind() - with engine.begin() as conn: - model.db.drop_all() - conn.execute(text("DROP TABLE IF EXISTS alembic_version")) # type: ignore - @app.cli.command("inspect-schema") @click.argument("schema") def inspect_schema(schema=None): @@ -223,7 +221,7 @@ def on_after_request(resp): if request.path.startswith(route): return resp - if "token" not in request.cookies or not g.token: + if "token" not in request.cookies: return resp token: str = request.cookies.get("token") or "" # type: ignore @@ -256,22 +254,15 @@ def on_after_request(resp): if token_blacklist: resp.delete_cookie("token") return resp - expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( minutes=180 ) - session = model.Session.query.get(g.token) - session_expires_at = datetime.datetime.fromtimestamp(session.expires_at, timezone.utc) - - if expired_in - session_expires_at < datetime.timedelta(minutes=90): - - new_token = jwt.encode( - {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, - config.FAIRHUB_SECRET, - algorithm="HS256", - ) - resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") - session.expires_at = expired_in.timestamp() + new_token = jwt.encode( + {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, + config.FAIRHUB_SECRET, + algorithm="HS256", + ) + resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") app.logger.info("after request") app.logger.info(request.headers.get("Origin")) @@ -287,6 +278,7 @@ def on_after_request(resp): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" app.logger.info(resp.headers) + return resp @app.errorhandler(ValidationException) diff --git a/model/__init__.py b/model/__init__.py index 204f4568..33959375 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -48,7 +48,6 @@ from .user import User from 
.user_details import UserDetails from .version import Version -from .session import Session from .version_readme import VersionReadme __all__ = [ @@ -103,5 +102,4 @@ "UserDetails", "Notification", "VersionReadme", - "Session", ] diff --git a/model/session.py b/model/session.py deleted file mode 100644 index e26ea7e2..00000000 --- a/model/session.py +++ /dev/null @@ -1,36 +0,0 @@ -from . import User - -from .db import db - - -class Session(db.Model): # type: ignore - def __init__(self, id, user: User): # pylint: disable=redefined-builtin - self.id = id - self.user = user - - __tablename__ = "session" - id = db.Column(db.CHAR(36), primary_key=True) - expires_at = db.Column(db.BigInteger, nullable=False) - - user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) - - user = db.relationship( - "User", - back_populates="session", - ) - - def to_dict(self): - return { - "id": self.id, - "expires_at": self.expires_at, - "user_id": self.user_id, - } - - @staticmethod - def from_data(id, expires_at, user: User): # pylint: disable=redefined-builtin - session = Session(id, user) - session.update(expires_at) - return session - - def update(self, expires_at): - self.expires_at = expires_at diff --git a/model/user.py b/model/user.py index c27a1419..9e741efc 100644 --- a/model/user.py +++ b/model/user.py @@ -29,7 +29,6 @@ def __init__(self, password): user_details = db.relationship("UserDetails", uselist=False, back_populates="user") token_blacklist = db.relationship("TokenBlacklist", back_populates="user") notification = db.relationship("Notification", back_populates="user") - session = db.relationship("Session", back_populates="user") def to_dict(self): return { diff --git a/tests/conftest.py b/tests/conftest.py index b2717e01..96c63dff 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -307,12 +307,6 @@ def clients(flask_app): ) assert response.status_code == 200 - meta = db.metadata - for table in reversed(meta.sorted_tables): - if table.name == 
'session': - session_entries = db.session.execute(table.select()).fetchall() - assert len(session_entries) == 5 - yield _logged_in_client, _admin_client, _editor_client, _viewer_client ctx.pop() diff --git a/tests/functional/test_user.py b/tests/functional/test_user.py index b4202ea8..d325e713 100644 --- a/tests/functional/test_user.py +++ b/tests/functional/test_user.py @@ -1,15 +1,14 @@ -from model.db import db - -# ------------------- Password Change ------------------- # +"""Tests for user settings""" +# ------------------- Password Change ------------------- # def test_post_password_change(clients): """ Given a Flask application configured for testing WHEN the '/auth/password/change' endpoint is requested (PUT) THEN check that the response is valid and the password is changed """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + _logged_in_client = clients[0] response = _logged_in_client.post( "/auth/password/change", @@ -19,40 +18,8 @@ def test_post_password_change(clients): "old_password": "Testingyeshello11!", }, ) - a_response = _admin_client.post( - "/auth/password/change", - json={ - "confirm_password": "Updatedpassword4testing!", - "new_password": "Updatedpassword4testing!", - "old_password": "Testingyeshello11!", - }, - ) - e_response = _editor_client.post( - "/auth/password/change", - json={ - "confirm_password": "Updatedpassword4testing!", - "new_password": "Updatedpassword4testing!", - "old_password": "Testingyeshello11!", - }, - ) - v_response = _viewer_client.post( - "/auth/password/change", - json={ - "confirm_password": "Updatedpassword4testing!", - "new_password": "Updatedpassword4testing!", - "old_password": "Testingyeshello11!", - }, - ) assert response.status_code == 200 - assert a_response.status_code == 200 - assert e_response.status_code == 200 - assert v_response.status_code == 200 - meta = db.metadata - for table in reversed(meta.sorted_tables): - if table.name == 'session': - session_entries = 
db.session.execute(table.select()).fetchall() - assert len(session_entries) == 0 def test_post_password_login_invalid_old_password(clients): @@ -61,7 +28,7 @@ def test_post_password_login_invalid_old_password(clients): WHEN the '/auth/login' endpoint is requested (POST) THEN check that the response is an error when old password is provided """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + _logged_in_client = clients[0] response = _logged_in_client.post( "/auth/login", json={ @@ -69,31 +36,8 @@ def test_post_password_login_invalid_old_password(clients): "password": "Testingyeshello11!", }, ) - a_response = _admin_client.post( - "/auth/login", - json={ - "email_address": "admin@fairhub.io", - "password": "Testingyeshello11!", - }, - ) - e_response = _editor_client.post( - "/auth/login", - json={ - "email_address": "editor@fairhub.io", - "password": "Testingyeshello11!", - }, - ) - v_response = _viewer_client.post( - "/auth/login", - json={ - "email_address": "viewer@fairhub.io", - "password": "Testingyeshello11!", - }, - ) + assert response.status_code == 401 - assert a_response.status_code == 401 - assert e_response.status_code == 401 - assert v_response.status_code == 401 def test_post_login_new_password(clients): @@ -110,41 +54,5 @@ def test_post_login_new_password(clients): "password": "Updatedpassword4testing!", }, ) - assert response.status_code == 200 - meta = db.metadata - for table in reversed(meta.sorted_tables): - if table.name == 'session': - session_entries = db.session.execute(table.select()).fetchall() - assert len(session_entries) == 1 - - -def test_post_logout(clients): - """ - Given a Flask application configured for testing - WHEN the '/auth/login' endpoint is requested (POST) - THEN check that the response is valid when new password is provided - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - - response = _logged_in_client.post( - "/auth/logout" - ) - a_response = 
_admin_client.post( - "/auth/logout" - ) - e_response = _editor_client.post( - "/auth/logout" - ) - v_response = _viewer_client.post( - "/auth/logout" - ) - assert response.status_code == 204 - assert a_response.status_code == 204 - assert e_response.status_code == 204 - assert v_response.status_code == 204 - meta = db.metadata - for table in reversed(meta.sorted_tables): - if table.name == 'session': - session_entries = db.session.execute(table.select()).fetchall() - assert len(session_entries) == 0 + assert response.status_code == 200 From f37a654c155417bd0219ca57aee0b356e108e221 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Thu, 7 Nov 2024 15:43:01 -0800 Subject: [PATCH 469/505] feat: year 2 dashboard updates --- apis/dashboard.py | 4 +- apis/redcap.py | 83 ---- modules/etl/config/aireadi_config.py | 396 ++++++++++++++++-- modules/etl/transforms/module_transform.py | 2 +- .../etl/transforms/redcap_live_transform.py | 17 +- .../transforms/redcap_release_transform.py | 17 +- modules/etl/vtypes/vtype.py | 18 - 7 files changed, 372 insertions(+), 165 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 061abf5a..70c60d1b 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -387,7 +387,7 @@ def get(self, study_id: str, dashboard_id: str): if not is_granted("view", study): return "Access denied, you can not view this dashboard", 403 - # Retrieve Dashboard Redis Cache if Available + # # Retrieve Dashboard Redis Cache if Available # cached_redcap_project_dashboard = caching.cache.get( # f"$study_id#{study_id}$dashboard_id#{dashboard_id}" # ) @@ -657,7 +657,7 @@ def get(self, study_id: str): # Public Dashboard ID dashboard_id = redcap_project_dashboard["id"] - # Retrieve Dashboard Redis Cache if Available + # # Retrieve Dashboard Redis Cache if Available # cached_redcap_project_dashboard = caching.cache.get( # f"$study_id#{study_id}$dashboard_id#{dashboard_id}#public" # ) diff --git a/apis/redcap.py b/apis/redcap.py index 7c24d120..edb3d65e 100644 
--- a/apis/redcap.py +++ b/apis/redcap.py @@ -247,86 +247,3 @@ def delete(self, study_id: str, redcap_id: str): model.db.session.commit() return 204 - -# @api.route("/study//redcap") -# class EditRedcapProjectAPI(Resource): -# @api.doc(parser=project_parser) -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_api_view_model) -# def put(self, study_id: int): -# """Update REDCap project API link""" -# study = model.Study.query.get(study_id) -# if not is_granted("update_redcap", study): -# return "Access denied, you can not modify this redcap project", 403 -# # Schema validation -# data: Union[Any, dict] = request.json -# schema = { -# "type": "object", -# "additionalProperties": False, -# "required": [ -# "api_pid", -# "title", -# "api_url", -# "api_active", -# ], -# "properties": { -# "api_pid": {"type": "string", "minLength": 1, "maxLength": 12}, -# "title": {"type": "string", "minLength": 1}, -# "api_url": {"type": "string", "minLength": 1}, -# "api_active": {"type": "boolean"}, -# }, -# } -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 - -# if len(data["api_pid"]) < 1: -# return ( -# f"""redcap api_pid is required for redcap access: -# {data['api_pid']}""", -# 400, -# ) -# if len(data["title"]) < 1: -# return ( -# f"""redcap title is required for redcap access: -# {data['title']}""", -# 400, -# ) -# if len(data["api_url"]) < 1: -# return ( -# f"""redcap api_url is required for redcap access: -# {data['api_url']}""", -# 400, -# ) -# if not isinstance(data["api_active"], bool): -# return ( -# f"""redcap api_active is required for redcap access: -# {data['api_active']}""", -# 400, -# ) -# update_redcap_project_view = model.StudyRedcap.query.get( -# data["api_pid"] -# ) -# update_redcap_project_view.update(data) -# model.db.session.commit() -# update_redcap_project_view = update_redcap_project_view.to_dict() -# return update_redcap_project_view, 201 - - -# 
@api.route("/study//redcap") -# class DeleteRedcapProjectAPI(Resource): -# @api.doc(parser=project_parser) -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# @api.marshal_with(redcap_project_api_view_model) -# def delete(self, study_id: int): -# """Delete REDCap project API link""" -# study = model.Study.query.get(study_id) -# if not is_granted("delete_redcap", study): -# return "Access denied, you can not delete this redcap project", 403 -# api_pid = project_parser.parse_args()["api_pid"] -# model.StudyRedcap.query.filter_by(api_pid=api_pid).delete() -# model.db.session.commit() -# return 204 diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 7ab320a5..419fecb1 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -49,7 +49,12 @@ "cmtrt_insln", "cmtrt_glcs", "cmtrt_lfst", - "dricmpdat", + "pacmpdat", +] + +phase_2_columns: List = [ + "race_db", + "export_group", ] computed_columns: List = [ @@ -119,7 +124,7 @@ "2": "Complete", "1": "Unverified", "0": "Incomplete", - "": "Value Unavailable", + "": missing_value_generic, } phenotypes_column_map: Dict[str, str] = { @@ -128,6 +133,21 @@ # "mh_a1c": "Elevated A1C", } +race_db_map: Dict[str, str] = { + "white": "White", + "black": "Black", + "hispanic": "Hispanic or Latino", + "asian": "Asian", + "unknown": "Unknown", + "": "Value Unavailable", + "Value Unavailable": "Value Unavailable", +} + +export_group_map: Dict[str, str] = { + "pilot": "Pilot", + "year2": "Year 2", +} + # sex_column_map: Dict[str, str] = { # "M": "Male", # "F": "Female", @@ -173,7 +193,7 @@ { "key": "participant-list", "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_247884.csv", + "filename": "Redcap_data_report_307916.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -186,7 +206,7 @@ { "key": "participant-values", "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_242544.csv", + 
"filename": "Redcap_data_report_307918.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -196,11 +216,13 @@ }, "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), - ("map_missing_values_by_columns", {"columns": data_columns}), + ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), + ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), + ("map_missing_values_by_columns", {"columns": data_columns + phase_2_columns}), ( "transform_values_by_column", { - "column": "dricmpdat", + "column": "pacmpdat", "new_column_name": "visitweek", # ISO 8601 string format token for front-end: %V "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, @@ -210,7 +232,7 @@ ( "transform_values_by_column", { - "column": "dricmpdat", + "column": "pacmpdat", "new_column_name": "visityear", # ISO 8601 string format token for front-end: %Y "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, @@ -220,7 +242,7 @@ ( "transform_values_by_column", { - "column": "dricmpdat", + "column": "pacmpdat", "new_column_name": "visitdate", # ISO 8601 string format token for front-end: %Y "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), @@ -247,14 +269,14 @@ ), ( "keep_columns", - {"columns": index_columns + data_columns + computed_columns}, + {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, ), ], }, { "key": "instrument-status", "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_251954.csv", + "filename": "Redcap_data_report_307920.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -274,7 +296,7 @@ { "key": "repeat-instrument", "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_259920.csv", + "filename": "Redcap_data_report_307922.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -325,7 +347,7 @@ { "key": "participant-list", "filepath": 
"AI-READI/REDCap", - "filename": "Redcap_data_report_247884.csv", + "filename": "Redcap_data_report_307916.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -338,7 +360,7 @@ { "key": "participant-values", "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_242544.csv", + "filename": "Redcap_data_report_307918.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -348,11 +370,13 @@ }, "transforms": [ ("remap_values_by_columns", {"columns": data_columns}), - ("map_missing_values_by_columns", {"columns": data_columns}), + ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), + ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), + ("map_missing_values_by_columns", {"columns": data_columns + phase_2_columns}), ( "transform_values_by_column", { - "column": "dricmpdat", + "column": "pacmpdat", "new_column_name": "visitweek", # ISO 8601 string format token for front-end: %V "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, @@ -362,7 +386,7 @@ ( "transform_values_by_column", { - "column": "dricmpdat", + "column": "pacmpdat", "new_column_name": "visityear", # ISO 8601 string format token for front-end: %Y "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, @@ -372,7 +396,7 @@ ( "transform_values_by_column", { - "column": "dricmpdat", + "column": "pacmpdat", "new_column_name": "visitdate", # ISO 8601 string format token for front-end: %Y "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), @@ -399,14 +423,14 @@ ), ( "keep_columns", - {"columns": index_columns + data_columns + computed_columns}, + {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, ), ], }, { "key": "instrument-status", "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_251954.csv", + "filename": "Redcap_data_report_307920.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": 
"raw", @@ -426,7 +450,7 @@ { "key": "repeat-instrument", "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_259920.csv", + "filename": "Redcap_data_report_307922.csv", "kwdargs": { "raw_or_label": "raw", "raw_or_label_headers": "raw", @@ -1466,6 +1490,103 @@ }, ) +# Phenotype Recruitment Counts by Phase +phenotypeRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Phenotype Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "phenotypes", "visitdate"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race Recruitment Counts by Phase +raceRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "race_db", "visitdate"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": 
"visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + + # Race Recruitment Counts raceRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", @@ -1478,7 +1599,7 @@ "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["race", "visitdate"], + "groups": ["race_db", "visitdate"], "value": "record_id", "func": "count", } @@ -1486,13 +1607,13 @@ "accessors": { "filterby": { "name": "Race", - "field": "race", + "field": "race_db", "missing_value": missing_value_generic, "astype": str, }, "group": { "name": "Race", - "field": "race", + "field": "race_db", "missing_value": missing_value_generic, "astype": str, }, @@ -1526,7 +1647,7 @@ "vtype": "DoubleDiscreteTimeseries", "methods": [ { - "groups": ["siteid", "race", "visitdate"], + "groups": ["siteid", "race_db", "visitdate"], "value": "record_id", "func": "count", } @@ -1540,7 +1661,7 @@ }, "group": { "name": "Race", - "field": "race", + "field": "race_db", "missing_value": missing_value_generic, "astype": str, }, @@ -1658,6 +1779,56 @@ }, ) +# Sex Counts by Phase +sexRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "sex-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Sex Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "scrsex", "visitdate"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": 
{ + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + + + # Race & Sex Counts by Race raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( "simpleTransform", @@ -1670,7 +1841,7 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["scrsex", "race", "siteid"], + "groups": ["scrsex", "race_db", "siteid"], "value": "record_id", "func": "count", } @@ -1689,7 +1860,7 @@ }, "subgroup": { "name": "Race", - "field": "race", + "field": "race_db", "missing_value": missing_value_generic, "astype": str, }, @@ -1811,7 +1982,7 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["phenotypes", "race", "scrsex"], + "groups": ["phenotypes", "race_db", "scrsex"], "value": "record_id", "func": "count", } @@ -1830,7 +2001,54 @@ }, "subgroup": { "name": "Race", - "field": "race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Phenotype & Race Counts by Phase +phenotypeRaceByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "phenotype-race-by-phase", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Race by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "export_group"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", "missing_value": missing_value_generic, "astype": str, }, @@ -1858,7 +2076,7 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["phenotypes", "race", 
"scrsex"], + "groups": ["phenotypes", "race_db", "scrsex"], "value": "record_id", "func": "count", } @@ -1866,7 +2084,7 @@ "accessors": { "filterby": { "name": "Race", - "field": "race", + "field": "race_db", "missing_value": missing_value_generic, }, "group": { @@ -1905,7 +2123,7 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["phenotypes", "race", "scrsex"], + "groups": ["phenotypes", "race_db", "scrsex"], "value": "record_id", "func": "count", } @@ -1913,7 +2131,7 @@ "accessors": { "filterby": { "name": "Race", - "field": "race", + "field": "race_db", "missing_value": missing_value_generic, }, "group": { @@ -1952,7 +2170,7 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["phenotypes", "race", "scrsex"], + "groups": ["phenotypes", "race_db", "scrsex"], "value": "record_id", "func": "count", } @@ -1971,7 +2189,54 @@ }, "subgroup": { "name": "Race", - "field": "race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race & Sex Counts by Phase +raceSexByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-sex-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Phase", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["export_group", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", "missing_value": missing_value_generic, "astype": str, }, @@ -1999,7 +2264,7 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["phenotypes", "race", "scrsex"], + "groups": 
["phenotypes", "race_db", "scrsex"], "value": "record_id", "func": "count", } @@ -2012,7 +2277,7 @@ }, "group": { "name": "Race", - "field": "race", + "field": "race_db", "missing_value": missing_value_generic, "astype": str, }, @@ -2046,7 +2311,7 @@ "vtype": "DoubleCategorical", "methods": [ { - "groups": ["phenotypes", "race", "scrsex"], + "groups": ["phenotypes", "race_db", "scrsex"], "value": "record_id", "func": "count", } @@ -2059,7 +2324,54 @@ }, "group": { "name": "Race", - "field": "race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, +) + +# Race & Phenotype Counts by Phase +racePhenotypeByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( + "simpleTransform", + { + "key": "race-phenotype-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race & Phenotype by Phase", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "export_group"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", "missing_value": missing_value_generic, "astype": str, }, @@ -4345,17 +4657,23 @@ "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, + "phenotype-race-by-phase": phenotypeRaceByPhaseTransformConfig, "phenotype-sex-by-race": phenotypeSexByRaceTransformConfig, "race-phenotype-by-sex": racePhenotypeBySexTransformConfig, + "race-phenotype-by-phase": racePhenotypeByPhaseTransformConfig, + "race-sex-by-phase": 
raceSexByPhaseTransformConfig, "race-sex-by-phenotype": raceSexByPhenotypeTransformConfig, "sex-phenotype-by-race": sexPhenotypeByRaceTransformConfig, "sex-race-by-phenotype": sexRaceByPhenotypeTransformConfig, "phenotype-recruitment": phenotypeRecruitmentTransformConfig, "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, + "phenotype-recruitment-by-phase": phenotypeRecruitmentByPhaseTransformConfig, "race-recruitment": raceRecruitmentTransformConfig, "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, + "race-recruitment-by-phase": raceRecruitmentByPhaseTransformConfig, "sex-recruitment": sexRecruitmentTransformConfig, "sex-recruitment-by-site": sexRecruitmentBySiteTransformConfig, + "sex-recruitment-by-phase": sexRecruitmentByPhaseTransformConfig, "race-sex-by-site": raceSexBySiteTransformConfig, "current-medications-by-site": currentMedicationsBySiteTransformConfig, } diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index b568aa4b..9da71265 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -67,7 +67,7 @@ def __init__( # Normalize Transforms to List Type, Check Validity, and Warn on Missing Attributes for indexed_transform in enumerate(self.transforms): - self.valid = True if self._transformIsValid(indexed_transform) else False + self.valid: bool = self._transformIsValid(indexed_transform) if self.strict and not self.valid: raise ValueError( f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for details" diff --git a/modules/etl/transforms/redcap_live_transform.py b/modules/etl/transforms/redcap_live_transform.py index 47afdc24..da7c1635 100644 --- a/modules/etl/transforms/redcap_live_transform.py +++ b/modules/etl/transforms/redcap_live_transform.py @@ -105,12 +105,6 @@ def __init__(self, config: dict) -> None: self.missing_value_generic, ] self.none_map = {key: 
self.missing_value_generic for key in self.none_values} - self.survey_instrument_map = { - "2": "Complete", - "1": "Unverified", - "0": "Incomplete", - "": self.missing_value_generic, - } self.logger.info(f"Initialized") @@ -235,7 +229,8 @@ def _merge_reports( ] if len(merge_steps) > 0: - for providing_report_key, merge_kwdargs in merge_steps: + for merge_step in merge_steps: + providing_report_key, merge_kwdargs = merge_step df_providing_report = self.reports[providing_report_key]["transformed"] df_receiving_report = df_receiving_report.merge( df_providing_report, **merge_kwdargs @@ -432,13 +427,14 @@ def _remap_values_by_columns( for subvalue in str(value).split(",") if len(subvalue) > 0 ] - df.loc[i, column] = self.multivalue_separator.join( + remapped_value = self.multivalue_separator.join( [ value_map[subvalue] for subvalue in subvalues if subvalue in value_map.keys() ] ) + df.loc[i, column] = remapped_value return df @@ -863,10 +859,9 @@ def export_transformed( # Export Merged Transforms def export_merged_transformed( - self, path: str = "", separator: str = "\t", filetype: str = ".tsv" + self, filepath: str = "transformed-merged_redcap-extract.tsv", separator: str = "\t" ) -> object: - filename = f"transformed-merged_redcap-extract{filetype}" - filepath = os.path.join(self.cwd, path, filename) + filepath = os.path.join(self.cwd, filepath) self.merged.to_csv( filepath, sep=separator, diff --git a/modules/etl/transforms/redcap_release_transform.py b/modules/etl/transforms/redcap_release_transform.py index 552e549d..9cabd1cb 100644 --- a/modules/etl/transforms/redcap_release_transform.py +++ b/modules/etl/transforms/redcap_release_transform.py @@ -106,12 +106,6 @@ def __init__(self, config: dict) -> None: self.missing_value_generic, ] self.none_map = {key: self.missing_value_generic for key in self.none_values} - self.survey_instrument_map = { - "2": "Complete", - "1": "Unverified", - "0": "Incomplete", - "": self.missing_value_generic, - } 
self.logger.info(f"Initialized") @@ -268,7 +262,8 @@ def _merge_reports( ] if len(merge_steps) > 0: - for providing_report_key, merge_kwdargs in merge_steps: + for merge_step in merge_steps: + providing_report_key, merge_kwdargs = merge_step df_providing_report = self.reports[providing_report_key]["transformed"] df_receiving_report = df_receiving_report.merge( df_providing_report, **merge_kwdargs @@ -465,13 +460,14 @@ def _remap_values_by_columns( for subvalue in str(value).split(",") if len(subvalue) > 0 ] - df.loc[i, column] = self.multivalue_separator.join( + remapped_value = self.multivalue_separator.join( [ value_map[subvalue] for subvalue in subvalues if subvalue in value_map.keys() ] ) + df.loc[i, column] = remapped_value return df @@ -897,10 +893,9 @@ def export_transformed( # Export Merged Transforms def export_merged_transformed( - self, path: str = "", separator: str = "\t", filetype: str = ".tsv" + self, filepath: str = "transformed-merged_redcap-extract.tsv", separator: str = "\t" ) -> object: - filename = f"transformed-merged_redcap-extract{filetype}" - filepath = os.path.join(self.cwd, path, filename) + filepath = os.path.join(self.cwd, filepath) self.merged.to_csv( filepath, sep=separator, diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index b565f829..7e1bb6b8 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -55,24 +55,6 @@ def __init__( def __str__(self): return f"{self.__dict__}" - # def isvalid( - # self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] - # ) -> bool: - # """ - # Extends the VType.isvalid method to operate on a list - # of pd.DataFrames and accessors. - # """ - # valid = True - # for accessors in accessorsList: - # if not super(Compound, self).isvalid(df, accessors): - # self.validation_errors.append( - # f"VType {self.name.title()} has invalid accessors. See additional details above." 
- # ) - # valid = False - # else: - # continue - # return valid - def isvalid( self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] ) -> bool: From e68846429d47e03468ee26a78f2d64ec7d0ea666 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Thu, 7 Nov 2024 15:46:11 -0800 Subject: [PATCH 470/505] fix: precommit error --- apis/redcap.py | 1 - modules/etl/transforms/module_transform.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/apis/redcap.py b/apis/redcap.py index edb3d65e..b2770b98 100644 --- a/apis/redcap.py +++ b/apis/redcap.py @@ -246,4 +246,3 @@ def delete(self, study_id: str, redcap_id: str): model.StudyRedcap.query.filter_by(id=redcap_id).delete() model.db.session.commit() return 204 - diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index 9da71265..6619fe73 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -67,7 +67,7 @@ def __init__( # Normalize Transforms to List Type, Check Validity, and Warn on Missing Attributes for indexed_transform in enumerate(self.transforms): - self.valid: bool = self._transformIsValid(indexed_transform) + self.valid = self._transformIsValid(indexed_transform) if self.strict and not self.valid: raise ValueError( f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for details" From 8e434b67d0b0cad52a7a51781dbaa592e6ca2e51 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 28 Jan 2025 11:28:04 -0800 Subject: [PATCH 471/505] fix: update cors origin for PR deployment --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index 00f93415..1434b5ee 100644 --- a/app.py +++ b/app.py @@ -77,7 +77,7 @@ def create_app(config_module=None, loglevel="INFO"): caching.cache.init_app(app) cors_origins = [ - "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # 
pylint: disable=anomalous-backslash-in-string + "https://witty-mushroom-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://staging.app.fairhub.io", "https://app.fairhub.io", "https://staging.fairhub.io", From 073d622f284825472d9c98a6652b9aa72f1d01af Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 28 Jan 2025 11:29:47 -0800 Subject: [PATCH 472/505] fix: update cors origin --- app.py | 1 + 1 file changed, 1 insertion(+) diff --git a/app.py b/app.py index 1434b5ee..bb1ad060 100644 --- a/app.py +++ b/app.py @@ -78,6 +78,7 @@ def create_app(config_module=None, loglevel="INFO"): cors_origins = [ "https://witty-mushroom-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string + "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://staging.app.fairhub.io", "https://app.fairhub.io", "https://staging.fairhub.io", From beb1b9bffa7f05062c5ed677016302547b6852cd Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 28 Jan 2025 12:02:24 -0800 Subject: [PATCH 473/505] fix: disable the package mood in poetry --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index e1dffbd8..4719ef22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,4 +1,5 @@ [tool.poetry] +package-mode = false name = "fairhub-api" version = "0.1.0" From f962fda3d1e1f26cc1d45915690bbc9a3a73b7a6 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 28 Jan 2025 12:16:14 -0800 Subject: [PATCH 474/505] fix: location of package-mode --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4719ef22..a1b9d064 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,4 @@ [tool.poetry] -package-mode = false name = "fairhub-api" version = 
"0.1.0" @@ -172,7 +171,7 @@ quiet = true profile = "black" [tool.mypy] - +package-mode = false ignore_missing_imports = true no_implicit_optional = true check_untyped_defs = true From 6ced394dbbc669504293fb5ef809e2a753307f1a Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 28 Jan 2025 13:34:30 -0800 Subject: [PATCH 475/505] chore: update workflow --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 44975a75..29f12739 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -21,7 +21,7 @@ jobs: - uses: Gr1N/setup-poetry@v8 - name: Install dependencies - run: poetry install + run: poetry install --no-root - uses: actions/cache@v2 with: From 5827d541195440bdd5e0b3bc4fbce8bd3722e201 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 28 Jan 2025 13:35:42 -0800 Subject: [PATCH 476/505] ci: update workflow --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8ecc9bb4..80c95a55 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -29,7 +29,7 @@ jobs: - uses: Gr1N/setup-poetry@v8 - name: Install dependencies - run: poetry install + run: poetry install --no-root - uses: actions/cache@v2 with: From e2b43af31dffa93da141f633e9ab866203319340 Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Tue, 28 Jan 2025 13:36:15 -0800 Subject: [PATCH 477/505] ci: update poetry install --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index b25c9554..7e50e6a0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,7 +14,7 @@ RUN pip install "poetry==$POETRY_VERSION" COPY poetry.lock pyproject.toml ./ RUN poetry config virtualenvs.create false -RUN poetry install +RUN poetry install --no-root COPY apis ./apis COPY model ./model @@ -31,4 +31,4 @@ COPY entrypoint.sh . 
RUN chmod +x entrypoint.sh -ENTRYPOINT ["./entrypoint.sh"] \ No newline at end of file +ENTRYPOINT ["./entrypoint.sh"] From e15b98ed1b9f20a1813a9224a42c58d96d824676 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 28 Jan 2025 14:20:10 -0800 Subject: [PATCH 478/505] fix: syntax --- app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app.py b/app.py index bb1ad060..aff927fc 100644 --- a/app.py +++ b/app.py @@ -77,7 +77,7 @@ def create_app(config_module=None, loglevel="INFO"): caching.cache.init_app(app) cors_origins = [ - "https://witty-mushroom-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string + "https://witty-mushroom-.*-.*.centralus.4.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://staging.app.fairhub.io", "https://app.fairhub.io", From 6ef9b54210db70ba641b49212665bd7404c4fcad Mon Sep 17 00:00:00 2001 From: Sanjay Soundarajan Date: Wed, 5 Feb 2025 15:07:47 -0800 Subject: [PATCH 479/505] feat: add sessions (#59) * feat: add session table * feat: add session table * chore: add/remove session in logout/change password * chore: add session in login * fix: session removal logic * chore: convert user removal into function * style: format * feat: add logout test functions * feat: add logout test functions * style: format * style: format * style: format * fix: expiration nullability * fix: alembic table removed * style: format * style: format * fix: change token storage for sessions * style: format * fix: session remove for pass change * fix: remove alembic table * style: format * fix: check session * fix: session authentication * fix: change email verified type * fix: session extension time * chore: fix session time * style: format * fix: token 
session * fix: destroy schema * feat: u[date poetry --------- Co-authored-by: aydawka --- .../3ffefbd9c03b_email_verified_type.py | 33 + .../9698369d7a8c_create_session_table.py | 27 + apis/authentication.py | 68 +- app.py | 45 +- model/__init__.py | 2 + model/session.py | 36 + model/user.py | 1 + poetry.lock | 2939 +++++++++-------- tests/conftest.py | 6 + tests/functional/test_user.py | 104 +- 10 files changed, 1818 insertions(+), 1443 deletions(-) create mode 100644 alembic/versions/3ffefbd9c03b_email_verified_type.py create mode 100644 alembic/versions/9698369d7a8c_create_session_table.py create mode 100644 model/session.py diff --git a/alembic/versions/3ffefbd9c03b_email_verified_type.py b/alembic/versions/3ffefbd9c03b_email_verified_type.py new file mode 100644 index 00000000..b3792d95 --- /dev/null +++ b/alembic/versions/3ffefbd9c03b_email_verified_type.py @@ -0,0 +1,33 @@ +"""email verified type + +Revision ID: 3ffefbd9c03b +Revises: 9698369d7a8c +Create Date: 2024-07-01 12:28:02.596192 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '3ffefbd9c03b' +down_revision: Union[str, None] = '9698369d7a8c' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("user") as batch_op: + batch_op.alter_column( + "email_verified", + type_=sa.Boolean(), + postgresql_using="email_verified::boolean", + ) + + + + + + diff --git a/alembic/versions/9698369d7a8c_create_session_table.py b/alembic/versions/9698369d7a8c_create_session_table.py new file mode 100644 index 00000000..b092a8b4 --- /dev/null +++ b/alembic/versions/9698369d7a8c_create_session_table.py @@ -0,0 +1,27 @@ +"""create session table + +Revision ID: 9698369d7a8c +Revises: +Create Date: 2024-06-13 09:59:17.605666 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = '9698369d7a8c' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.create_table( + 'session', + sa.Column('id', sa.CHAR(36), primary_key=True), + sa.Column('user_id', sa.CHAR(36), sa.ForeignKey("user.id"), nullable=False), + sa.Column('expires_at', sa.BigInteger, nullable=False) + ) + diff --git a/apis/authentication.py b/apis/authentication.py index 10407d01..c6d11199 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -9,13 +9,13 @@ import uuid from datetime import timezone from typing import Any, Union +import time import jwt from email_validator import EmailNotValidError, validate_email from flask import g, make_response, request from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate - import model api = Namespace("Authentication", description="Authentication paths", path="/") @@ -233,24 +233,29 @@ def validate_is_valid_email(instance): # If not testing, directly 
use the 'config' module config = config_module + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( + minutes=180 + ) + jti = str(uuid.uuid4()) encoded_jwt_code = jwt.encode( { "user": user.id, - "exp": datetime.datetime.now(timezone.utc) - + datetime.timedelta(minutes=180), # noqa: W503 - "jti": str(uuid.uuid4()), + "exp": expired_in, + "jti": jti, + }, # noqa: W503 config.FAIRHUB_SECRET, algorithm="HS256", ) - resp = make_response(user.to_dict()) resp.set_cookie( "token", encoded_jwt_code, secure=True, httponly=True, samesite="None" ) - resp.status_code = 200 - + g.token = jti + added_session = model.Session.from_data(jti, expired_in.timestamp(), user) + model.db.session.add(added_session) + model.db.session.commit() return resp @@ -258,7 +263,7 @@ def authentication(): """it authenticates users to a study, sets access and refresh token. In addition, it handles error handling of expired token and non existed users""" g.user = None - + g.token = None if "token" not in request.cookies: return token: str = ( @@ -286,7 +291,20 @@ def authentication(): token_blacklist = model.TokenBlacklist.query.get(decoded["jti"]) if token_blacklist: return + # decode user user = model.User.query.get(decoded["user"]) + # decode session + session = model.Session.query.get(decoded["jti"]) + if not session: + g.user = None + g.token = None + return + + if session.expires_at < time.time(): + g.user = None + g.token = None + return + g.token = decoded["jti"] g.user = user @@ -397,6 +415,7 @@ class Logout(Resource): @api.response(400, "Validation Error") def post(self): """simply logges out user from the system""" + resp = make_response() resp.set_cookie( "token", @@ -407,6 +426,16 @@ def post(self): expires=datetime.datetime.now(timezone.utc), ) resp.status_code = 204 + + if g.user and g.token: + remove_session = ( + model.Session.query + .filter(model.Session.id == g.token) + .first() + ) + if remove_session: + model.db.session.delete(remove_session) + 
model.db.session.commit() return resp @@ -473,20 +502,21 @@ def confirm_new_password(instance): data: Union[Any, dict] = request.json user = model.User.query.get(g.user.id) + user.set_password(data["new_password"]) + model.db.session.commit() + session_logout() return "Password updated successfully", 200 -# @api.route("/auth/current-users") -# class CurrentUsers(Resource): -# """function is used to see all logged users in -# the system. For now, it is used for testing purposes""" +def session_logout(): + if g.user and g.token: + remove_sessions = model.Session.query.filter( + model.Session.user_id == g.user.id + ).all() -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def get(self): -# """returns all logged users in the system""" -# if not g.user: -# return None -# return g.user.to_dict() + for session in remove_sessions: + model.db.session.delete(session) + model.db.session.commit() + # return "Sessions are removed successfully", 200 diff --git a/app.py b/app.py index aff927fc..22d47b48 100644 --- a/app.py +++ b/app.py @@ -12,7 +12,7 @@ from flask_bcrypt import Bcrypt from flask_cors import CORS from growthbook import GrowthBook -from sqlalchemy import MetaData, inspect +from sqlalchemy import MetaData, inspect, text from sqlalchemy.ext.compiler import compiles from sqlalchemy.schema import DropTable from waitress import serve @@ -126,16 +126,6 @@ def create_schema(): with engine.begin(): model.db.create_all() - @app.cli.command("destroy-schema") - def destroy_schema(): - """Create the database schema.""" - # If DB is Azure, Skip - if config.FAIRHUB_DATABASE_URL.find("azure") > -1: - return - engine = model.db.session.get_bind() - with engine.begin(): - model.db.drop_all() - @app.cli.command("cycle-schema") def cycle_schema(): """Destroy then re-create the database schema.""" @@ -160,6 +150,17 @@ def list_schemas(): for schema_name in schema_names: print(schema_name) + @app.cli.command("destroy-schema") + def destroy_schema(): + 
"""Create the database schema.""" + # If DB is Azure, Skip + if config.FAIRHUB_DATABASE_URL.find("azure") > -1: + return + engine = model.db.session.get_bind() + with engine.begin() as conn: + model.db.drop_all() + conn.execute(text("DROP TABLE IF EXISTS alembic_version")) # type: ignore + @app.cli.command("inspect-schema") @click.argument("schema") def inspect_schema(schema=None): @@ -222,7 +223,7 @@ def on_after_request(resp): if request.path.startswith(route): return resp - if "token" not in request.cookies: + if "token" not in request.cookies or not g.token: return resp token: str = request.cookies.get("token") or "" # type: ignore @@ -255,15 +256,22 @@ def on_after_request(resp): if token_blacklist: resp.delete_cookie("token") return resp + expired_in = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta( minutes=180 ) - new_token = jwt.encode( - {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, - config.FAIRHUB_SECRET, - algorithm="HS256", - ) - resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") + session = model.Session.query.get(g.token) + session_expires_at = datetime.datetime.fromtimestamp(session.expires_at, timezone.utc) + + if expired_in - session_expires_at < datetime.timedelta(minutes=90): + + new_token = jwt.encode( + {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, + config.FAIRHUB_SECRET, + algorithm="HS256", + ) + resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") + session.expires_at = expired_in.timestamp() app.logger.info("after request") app.logger.info(request.headers.get("Origin")) @@ -279,7 +287,6 @@ def on_after_request(resp): # ] = "Content-Type, Authorization, Access-Control-Allow-Origin, # Access-Control-Allow-Credentials" app.logger.info(resp.headers) - return resp @app.errorhandler(ValidationException) diff --git a/model/__init__.py b/model/__init__.py index 33959375..204f4568 100644 --- a/model/__init__.py +++ 
b/model/__init__.py @@ -48,6 +48,7 @@ from .user import User from .user_details import UserDetails from .version import Version +from .session import Session from .version_readme import VersionReadme __all__ = [ @@ -102,4 +103,5 @@ "UserDetails", "Notification", "VersionReadme", + "Session", ] diff --git a/model/session.py b/model/session.py new file mode 100644 index 00000000..e26ea7e2 --- /dev/null +++ b/model/session.py @@ -0,0 +1,36 @@ +from . import User + +from .db import db + + +class Session(db.Model): # type: ignore + def __init__(self, id, user: User): # pylint: disable=redefined-builtin + self.id = id + self.user = user + + __tablename__ = "session" + id = db.Column(db.CHAR(36), primary_key=True) + expires_at = db.Column(db.BigInteger, nullable=False) + + user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) + + user = db.relationship( + "User", + back_populates="session", + ) + + def to_dict(self): + return { + "id": self.id, + "expires_at": self.expires_at, + "user_id": self.user_id, + } + + @staticmethod + def from_data(id, expires_at, user: User): # pylint: disable=redefined-builtin + session = Session(id, user) + session.update(expires_at) + return session + + def update(self, expires_at): + self.expires_at = expires_at diff --git a/model/user.py b/model/user.py index 9e741efc..c27a1419 100644 --- a/model/user.py +++ b/model/user.py @@ -29,6 +29,7 @@ def __init__(self, password): user_details = db.relationship("UserDetails", uselist=False, back_populates="user") token_blacklist = db.relationship("TokenBlacklist", back_populates="user") notification = db.relationship("Notification", back_populates="user") + session = db.relationship("Session", back_populates="user") def to_dict(self): return { diff --git a/poetry.lock b/poetry.lock index 7b826e9c..fb4fa708 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,14 +2,14 @@ [[package]] name = "alembic" -version = "1.13.1" +version = "1.14.1" description = "A database migration tool for 
SQLAlchemy." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, - {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, + {file = "alembic-1.14.1-py3-none-any.whl", hash = "sha256:1acdd7a3a478e208b0503cd73614d5e4c6efafa4e73518bb60e4f2846a37b1c5"}, + {file = "alembic-1.14.1.tar.gz", hash = "sha256:496e888245a53adf1498fcab31713a469c65836f8de76e01399aa1c3e90dd213"}, ] [package.dependencies] @@ -18,18 +18,18 @@ SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" [package.extras] -tz = ["backports.zoneinfo"] +tz = ["backports.zoneinfo", "tzdata"] [[package]] name = "aniso8601" -version = "9.0.1" +version = "10.0.0" description = "A library for parsing ISO 8601 strings." category = "main" optional = false python-versions = "*" files = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, + {file = "aniso8601-10.0.0-py2.py3-none-any.whl", hash = "sha256:3c943422efaa0229ebd2b0d7d223effb5e7c89e24d2267ebe76c61a2d8e290cb"}, + {file = "aniso8601-10.0.0.tar.gz", hash = "sha256:ff1d0fc2346688c62c0151547136ac30e322896ed8af316ef7602c47da9426cf"}, ] [package.extras] @@ -37,26 +37,26 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "4.2.0" +version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, - {file = "anyio-4.2.0.tar.gz", hash = 
"sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "appnope" @@ -151,14 +151,14 @@ test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "py [[package]] name = "art" -version = "6.1" +version = "6.4" description = "ASCII Art Library For Python" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "art-6.1-py3-none-any.whl", hash = "sha256:159819c418001467f8d79616fa0814277deac97c8a363d1eb3e7c0a31526bfc3"}, - {file = "art-6.1.tar.gz", hash = "sha256:6ab3031e3b7710039e73497b0e750cadfe04d4c1279ce3a123500dbafb9e1b64"}, + {file = "art-6.4-py3-none-any.whl", hash = "sha256:4e58b6f0a0bb8574efb311eff24bdd28bf889c0c526ccbbb5410c644340a301c"}, + {file = "art-6.4.tar.gz", hash = 
"sha256:417fea674bff8cea7ed058291ad1b81a6032dfce5152f28e629fa4a798a2c14c"}, ] [package.extras] @@ -186,22 +186,19 @@ wrapt = [ [[package]] name = "asttokens" -version = "2.4.1" +version = "3.0.0" description = "Annotate AST trees with source code positions" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, ] -[package.dependencies] -six = ">=1.12.0" - [package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "async-lru" @@ -220,46 +217,46 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.1" description = "Timeout context manager for asyncio programs" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = 
"sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] name = "attrs" -version = "23.2.0" +version = "25.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", 
"pytest-mypy-plugins"] [[package]] name = "azure-core" -version = "1.30.1" +version = "1.32.0" description = "Microsoft Azure Core Library for Python" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "azure-core-1.30.1.tar.gz", hash = "sha256:26273a254131f84269e8ea4464f3560c731f29c0c1f69ac99010845f239c1a8f"}, - {file = "azure_core-1.30.1-py3-none-any.whl", hash = "sha256:7c5ee397e48f281ec4dd773d67a0a47a0962ed6fa833036057f9ea067f688e74"}, + {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, + {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, ] [package.dependencies] @@ -272,75 +269,73 @@ aio = ["aiohttp (>=3.0)"] [[package]] name = "azure-storage-blob" -version = "12.19.1" +version = "12.24.1" description = "Microsoft Azure Blob Storage Client Library for Python" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "azure-storage-blob-12.19.1.tar.gz", hash = "sha256:13e16ba42fc54ac2c7e8f976062173a5c82b9ec0594728e134aac372965a11b0"}, - {file = "azure_storage_blob-12.19.1-py3-none-any.whl", hash = "sha256:c5530dc51c21c9564e4eb706cd499befca8819b10dd89716d3fc90d747556243"}, + {file = "azure_storage_blob-12.24.1-py3-none-any.whl", hash = "sha256:77fb823fdbac7f3c11f7d86a5892e2f85e161e8440a7489babe2195bf248f09e"}, + {file = "azure_storage_blob-12.24.1.tar.gz", hash = "sha256:052b2a1ea41725ba12e2f4f17be85a54df1129e13ea0321f5a2fcc851cbf47d4"}, ] [package.dependencies] -azure-core = ">=1.28.0,<2.0.0" +azure-core = ">=1.30.0" cryptography = ">=2.1.4" isodate = ">=0.6.1" -typing-extensions = ">=4.3.0" +typing-extensions = ">=4.6.0" [package.extras] -aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] +aio = ["azure-core[aio] (>=1.30.0)"] [[package]] name = "babel" -version = "2.14.0" +version = "2.17.0" description = 
"Internationalization utilities" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "bcrypt" -version = "4.1.2" +version = "4.2.1" description = "Modern password hashing for your software and your servers" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, - {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, - {file = 
"bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, - {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, - {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, - {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, - {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, - {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, - {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, - {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, - {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, - {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, - {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, - {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, - {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, - {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, + {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"}, + {file = 
"bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"}, + {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash = "sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"}, + {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"}, + {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = 
"sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"}, + {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"}, + {file = "bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"}, + {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"}, ] [package.extras] @@ -349,18 +344,19 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.12.3" +version = "4.13.3" description = "Screen-scraping library" category = "dev" optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.7.0" files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, + {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, + {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, ] [package.dependencies] soupsieve = ">1.2" +typing-extensions = ">=4.0.0" [package.extras] cchardet = ["cchardet"] @@ -418,33 +414,33 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bleach" -version = "6.1.0" +version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." 
category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, + {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, + {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, ] [package.dependencies] -six = ">=1.9.0" +tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] +css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "blinker" -version = "1.7.0" +version = "1.9.0" description = "Fast, simple object-to-object and broadcast signaling" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, - {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, + {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, + {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, ] [[package]] @@ -461,76 +457,91 @@ files = [ [[package]] name = "certifi" -version = "2024.2.2" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", 
hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = 
"sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = 
"cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = 
"sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -538,114 +549,116 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = 
"charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = 
"charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file 
= "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + 
{file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -665,14 +678,14 @@ files = [ [[package]] name = "comm" -version = "0.2.1" +version = "0.2.2" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, - {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, ] [package.dependencies] @@ -683,64 +696,74 @@ test = ["pytest"] [[package]] name = "coverage" -version = "7.4.1" +version = "7.6.10" description = "Code coverage measurement for Python" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, 
- {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = 
"coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = 
"coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = 
"coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = 
"sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = 
"coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = 
"coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, ] [package.dependencies] @@ -751,14 +774,14 @@ toml = ["tomli"] [[package]] name = "coveragespace" -version = "6.0.2" +version = "6.1" description = "A place to track your code coverage metrics." 
category = "dev" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "coveragespace-6.0.2-py3-none-any.whl", hash = "sha256:e4900e6eac340d2ea8b6f831a75984392eb9090f9d811c94b7f4c9496214d9b5"}, - {file = "coveragespace-6.0.2.tar.gz", hash = "sha256:07ff131408bcd37cfe3b142092a289d949102d36221e40f066a557121be7cae7"}, + {file = "coveragespace-6.1-py3-none-any.whl", hash = "sha256:ca6ccd5eb32eb6ce5fe78de6c052353b9fbb378a886fde0838480defe33406a8"}, + {file = "coveragespace-6.1.tar.gz", hash = "sha256:049c0b7b629ad43d72692f0f99b9f8a97936ad596f7f27c1af61323fba90ebef"}, ] [package.dependencies] @@ -810,34 +833,38 @@ test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)" [[package]] name = "debugpy" -version = "1.8.1" +version = "1.8.12" description = "An implementation of the Debug Adapter Protocol for Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, - {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, - {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, - {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, - {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, - {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, - {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, - {file = 
"debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, - {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, - {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, - {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, - {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, - {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, - {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, - {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, - {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, - {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, - {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, - {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, - {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, - {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = 
"sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, - {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, + {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, + {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, + {file = "debugpy-1.8.12-cp310-cp310-win32.whl", hash = "sha256:b202f591204023b3ce62ff9a47baa555dc00bb092219abf5caf0e3718ac20e7c"}, + {file = "debugpy-1.8.12-cp310-cp310-win_amd64.whl", hash = "sha256:9649eced17a98ce816756ce50433b2dd85dfa7bc92ceb60579d68c053f98dff9"}, + {file = "debugpy-1.8.12-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:36f4829839ef0afdfdd208bb54f4c3d0eea86106d719811681a8627ae2e53dd5"}, + {file = "debugpy-1.8.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a28ed481d530e3138553be60991d2d61103ce6da254e51547b79549675f539b7"}, + {file = "debugpy-1.8.12-cp311-cp311-win32.whl", hash = "sha256:4ad9a94d8f5c9b954e0e3b137cc64ef3f579d0df3c3698fe9c3734ee397e4abb"}, + {file = "debugpy-1.8.12-cp311-cp311-win_amd64.whl", hash = "sha256:4703575b78dd697b294f8c65588dc86874ed787b7348c65da70cfc885efdf1e1"}, + {file = "debugpy-1.8.12-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:7e94b643b19e8feb5215fa508aee531387494bf668b2eca27fa769ea11d9f498"}, + {file = "debugpy-1.8.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086b32e233e89a2740c1615c2f775c34ae951508b28b308681dbbb87bba97d06"}, + {file = "debugpy-1.8.12-cp312-cp312-win32.whl", hash = "sha256:2ae5df899732a6051b49ea2632a9ea67f929604fd2b036613a9f12bc3163b92d"}, + {file = "debugpy-1.8.12-cp312-cp312-win_amd64.whl", hash = 
"sha256:39dfbb6fa09f12fae32639e3286112fc35ae976114f1f3d37375f3130a820969"}, + {file = "debugpy-1.8.12-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:696d8ae4dff4cbd06bf6b10d671e088b66669f110c7c4e18a44c43cf75ce966f"}, + {file = "debugpy-1.8.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:898fba72b81a654e74412a67c7e0a81e89723cfe2a3ea6fcd3feaa3395138ca9"}, + {file = "debugpy-1.8.12-cp313-cp313-win32.whl", hash = "sha256:22a11c493c70413a01ed03f01c3c3a2fc4478fc6ee186e340487b2edcd6f4180"}, + {file = "debugpy-1.8.12-cp313-cp313-win_amd64.whl", hash = "sha256:fdb3c6d342825ea10b90e43d7f20f01535a72b3a1997850c0c3cefa5c27a4a2c"}, + {file = "debugpy-1.8.12-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:b0232cd42506d0c94f9328aaf0d1d0785f90f87ae72d9759df7e5051be039738"}, + {file = "debugpy-1.8.12-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9af40506a59450f1315168d47a970db1a65aaab5df3833ac389d2899a5d63b3f"}, + {file = "debugpy-1.8.12-cp38-cp38-win32.whl", hash = "sha256:5cc45235fefac57f52680902b7d197fb2f3650112379a6fa9aa1b1c1d3ed3f02"}, + {file = "debugpy-1.8.12-cp38-cp38-win_amd64.whl", hash = "sha256:557cc55b51ab2f3371e238804ffc8510b6ef087673303890f57a24195d096e61"}, + {file = "debugpy-1.8.12-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:b5c6c967d02fee30e157ab5227706f965d5c37679c687b1e7bbc5d9e7128bd41"}, + {file = "debugpy-1.8.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a77f422f31f170c4b7e9ca58eae2a6c8e04da54121900651dfa8e66c29901a"}, + {file = "debugpy-1.8.12-cp39-cp39-win32.whl", hash = "sha256:a4042edef80364239f5b7b5764e55fd3ffd40c32cf6753da9bda4ff0ac466018"}, + {file = "debugpy-1.8.12-cp39-cp39-win_amd64.whl", hash = "sha256:f30b03b0f27608a0b26c75f0bb8a880c752c0e0b01090551b9d87c7d783e2069"}, + {file = "debugpy-1.8.12-py2.py3-none-any.whl", hash = 
"sha256:274b6a2040349b5c9864e475284bce5bb062e63dce368a394b8cc865ae3b00c6"}, + {file = "debugpy-1.8.12.tar.gz", hash = "sha256:646530b04f45c830ceae8e491ca1c9320a2d2f0efea3141487c82130aba70dce"}, ] [[package]] @@ -878,14 +905,14 @@ files = [ [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -894,23 +921,23 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "dnspython" -version = "2.5.0" +version = "2.7.0" description = "DNS toolkit" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "dnspython-2.5.0-py3-none-any.whl", hash = "sha256:6facdf76b73c742ccf2d07add296f178e629da60be23ce4b0a9c927b1e02c3a6"}, - {file = "dnspython-2.5.0.tar.gz", hash = "sha256:a0034815a59ba9ae888946be7ccca8f7c157b286f8455b379c692efb51022a15"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=5.0.3)", "mypy (>=1.0.1)", "pylint (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "sphinx (>=7.0.0)", "twine (>=4.0.0)", "wheel (>=0.41.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=0.17.3)", "httpx (>=0.25.1)"] -doq = 
["aioquic (>=0.9.20)"] -idna = ["idna (>=2.1)"] -trio = ["trio (>=0.14)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] [[package]] @@ -926,14 +953,14 @@ files = [ [[package]] name = "email-validator" -version = "2.1.0.post1" +version = "2.2.0" description = "A robust email address syntax and deliverability validation library." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "email_validator-2.1.0.post1-py3-none-any.whl", hash = "sha256:c973053efbeddfef924dc0bd93f6e77a1ea7ee0fce935aea7103c7a3d6d2d637"}, - {file = "email_validator-2.1.0.post1.tar.gz", hash = "sha256:a4b0bd1cf55f073b924258d19321b1f3aa74b4b5a71a42c305575dba920e1a44"}, + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, ] [package.dependencies] @@ -942,14 +969,14 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = 
"sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -957,14 +984,14 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.1" +version = "2.2.0" description = "Get the currently executing AST node of a frame, and other information" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, + {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, ] [package.extras] @@ -987,14 +1014,14 @@ python-dateutil = ">=2.4" [[package]] name = "fastjsonschema" -version = "2.19.1" +version = "2.21.1" description = "Fastest Python implementation of JSON schema" category = "dev" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, + {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, + {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, ] [package.extras] @@ -1058,14 +1085,14 @@ Flask = "*" [[package]] name = "flask-caching" -version = "2.1.0" +version = "2.3.0" description = "Adds caching support to Flask 
applications." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Flask-Caching-2.1.0.tar.gz", hash = "sha256:b7500c145135836a952e3de3a80881d9654e327a29c852c9265607f5c449235c"}, - {file = "Flask_Caching-2.1.0-py3-none-any.whl", hash = "sha256:f02645a629a8c89800d96dc8f690a574a0d49dcd66c7536badc6d362ba46b716"}, + {file = "Flask_Caching-2.3.0-py3-none-any.whl", hash = "sha256:51771c75682e5abc1483b78b96d9131d7941dc669b073852edfa319dd4e29b6e"}, + {file = "flask_caching-2.3.0.tar.gz", hash = "sha256:d7e4ca64a33b49feb339fcdd17e6ba25f5e01168cf885e53790e885f83a4d2cf"}, ] [package.dependencies] @@ -1074,14 +1101,14 @@ Flask = "*" [[package]] name = "flask-cors" -version = "4.0.0" +version = "4.0.2" description = "A Flask extension adding a decorator for CORS support" category = "main" optional = false python-versions = "*" files = [ - {file = "Flask-Cors-4.0.0.tar.gz", hash = "sha256:f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0"}, - {file = "Flask_Cors-4.0.0-py2.py3-none-any.whl", hash = "sha256:bc3492bfd6368d27cfe79c7821df5a8a319e1a6d5eab277a3794be19bdc51783"}, + {file = "Flask_Cors-4.0.2-py2.py3-none-any.whl", hash = "sha256:38364faf1a7a5d0a55bd1d2e2f83ee9e359039182f5e6a029557e1f56d92c09a"}, + {file = "flask_cors-4.0.2.tar.gz", hash = "sha256:493b98e2d1e2f1a4720a7af25693ef2fe32fbafec09a2f72c59f3e475eda61d2"}, ] [package.dependencies] @@ -1157,14 +1184,14 @@ files = [ [[package]] name = "freezegun" -version = "1.4.0" +version = "1.5.1" description = "Let your Python tests travel through time" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, - {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, + {file = "freezegun-1.5.1-py3-none-any.whl", hash = 
"sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, + {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, ] [package.dependencies] @@ -1190,70 +1217,85 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.1.1" description = "Lightweight in-process concurrent programming" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", 
hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - 
{file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = 
"greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = 
"greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = 
"greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + 
{file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, ] [package.extras] @@ -1262,14 +1304,14 @@ test = ["objgraph", "psutil"] [[package]] name = "growthbook" -version = "1.0.0" +version = "1.1.0" description = "Powerful Feature flagging and A/B testing for Python apps" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "growthbook-1.0.0-py2.py3-none-any.whl", hash = "sha256:919acf8b543bd0f7696626006d2bc2aeb818bfa7b63953e6fb6b597cd2b46a43"}, - {file = "growthbook-1.0.0.tar.gz", hash = "sha256:465b9dd370a3a6dbad75b12558646d6c51e7926f311b7ad74fb3dfc76e1eb4ca"}, + {file = 
"growthbook-1.1.0-py2.py3-none-any.whl", hash = "sha256:4e74345d81172ea58d0a5e348218fbb4c6a2f218144ee139efbec444c18a6fc2"}, + {file = "growthbook-1.1.0.tar.gz", hash = "sha256:56cc27df0f241491c27efe18b510833bb8d44e8004038ebc30db2bd5c598e879"}, ] [package.dependencies] @@ -1291,14 +1333,14 @@ files = [ [[package]] name = "httpcore" -version = "1.0.3" +version = "1.0.7" description = "A minimal low-level HTTP client." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, - {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -1309,18 +1351,18 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (>=1.0.0,<2.0.0)"] -trio = ["trio (>=0.22.0,<0.24.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.26.0" +version = "0.28.1" description = "The next generation HTTP client." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, - {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -1328,61 +1370,72 @@ anyio = "*" certifi = "*" httpcore = ">=1.0.0,<2.0.0" idna = "*" -sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (>=1.0.0,<2.0.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.6" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "7.0.1" +version = "8.6.1" description = "Read metadata from Python packages" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", 
hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "importlib-resources" -version = "6.1.1" +version = "6.5.2" description = "Read resources from Python packages" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "importlib_resources-6.1.1-py3-none-any.whl", hash = "sha256:e8bf90d8213b486f428c9c39714b920041cb02c184686a3dee24905aaa8105d6"}, - {file = "importlib_resources-6.1.1.tar.gz", hash = "sha256:3893a00122eafde6894c59914446a512f728a0c1a45f9bb9b63721b6bacf0b4a"}, + {file = "importlib_resources-6.5.2-py3-none-any.whl", hash = 
"sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec"}, + {file = "importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -1398,14 +1451,14 @@ files = [ [[package]] name = "ipykernel" -version = "6.29.2" +version = "6.29.5" description = "IPython Kernel for Jupyter" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.29.2-py3-none-any.whl", hash = "sha256:50384f5c577a260a1d53f1f59a828c7266d321c9b7d00d345693783f66616055"}, - {file = "ipykernel-6.29.2.tar.gz", hash = "sha256:3bade28004e3ff624ed57974948116670604ac5f676d12339693f3142176d3f0"}, + {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, + {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, ] [package.dependencies] @@ -1428,18 +1481,18 @@ cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", 
"pytest (>=7.0)", "pytest-asyncio (==0.23.4)", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" -version = "8.21.0" +version = "8.32.0" description = "IPython: Productive Interactive Computing" category = "dev" optional = false python-versions = ">=3.10" files = [ - {file = "ipython-8.21.0-py3-none-any.whl", hash = "sha256:1050a3ab8473488d7eee163796b02e511d0735cf43a04ba2a8348bd0f2eaf8a5"}, - {file = "ipython-8.21.0.tar.gz", hash = "sha256:48fbc236fbe0e138b88773fa0437751f14c3645fb483f1d4c5dee58b37e5ce73"}, + {file = "ipython-8.32.0-py3-none-any.whl", hash = "sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa"}, + {file = "ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251"}, ] [package.dependencies] @@ -1448,62 +1501,61 @@ decorator = "*" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -prompt-toolkit = ">=3.0.41,<3.1.0" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" +stack_data = "*" +traitlets = ">=5.13.0" +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", 
"ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] kernel = ["ipykernel"] +matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath", "trio"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] [[package]] name = "ipywidgets" -version = "8.1.2" +version = "8.1.5" description = "Jupyter interactive widgets" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, - {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, + {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"}, + {file = "ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.10,<3.1.0" +jupyterlab-widgets = ">=3.0.12,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = 
">=4.0.10,<4.1.0" +widgetsnbextension = ">=4.0.12,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] [[package]] name = "isodate" -version = "0.6.1" +version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, ] -[package.dependencies] -six = "*" - [[package]] name = "isoduration" version = "20.11.0" @@ -1536,46 +1588,46 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "itsdangerous" -version = "2.1.2" +version = "2.2.0" description = "Safely pass data to untrusted environments and back." category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, - {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, ] [[package]] name = "jedi" -version = "0.19.1" +version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." 
category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, ] [package.dependencies] -parso = ">=0.8.3,<0.9.0" +parso = ">=0.8.4,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.5" description = "A very fast and expressive template engine." 
category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -1586,41 +1638,41 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "json5" -version = "0.9.14" +version = "0.10.0" description = "A Python implementation of the JSON5 data format." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8.0" files = [ - {file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f"}, - {file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02"}, + {file = "json5-0.10.0-py3-none-any.whl", hash = "sha256:19b23410220a7271e8377f81ba8aacba2fdd56947fbb137ee5977cbe1f5e8dfa"}, + {file = "json5-0.10.0.tar.gz", hash = "sha256:e66941c8f0a02026943c52c2eb34ebeb2a6f819a0be05920a6f5243cd30fd559"}, ] [package.extras] -dev = ["hypothesis"] +dev = ["build (==1.2.2.post1)", "coverage (==7.5.3)", "mypy (==1.13.0)", "pip (==24.3.1)", "pylint (==3.2.3)", "ruff (==0.7.3)", "twine (==5.1.1)", "uv (==0.5.1)"] [[package]] name = "jsonpointer" -version = "2.4" +version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +python-versions = ">=3.7" files = [ - {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = 
"jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, ] [[package]] name = "jsonschema" -version = "4.21.1" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, - {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] @@ -1635,22 +1687,22 @@ rfc3339-validator = {version = "*", optional = true, markers = "extra == \"forma rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", 
"rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2024.10.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, ] [package.dependencies] @@ -1658,35 +1710,34 @@ referencing = ">=0.31.0" [[package]] name = "jupyter" -version = "1.0.0" +version = "1.1.1" description = "Jupyter metapackage. Install all the Jupyter components in one go." 
category = "dev" optional = false python-versions = "*" files = [ - {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, - {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, - {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, + {file = "jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83"}, + {file = "jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a"}, ] [package.dependencies] ipykernel = "*" ipywidgets = "*" jupyter-console = "*" +jupyterlab = "*" nbconvert = "*" notebook = "*" -qtconsole = "*" [[package]] name = "jupyter-client" -version = "8.6.0" +version = "8.6.3" description = "Jupyter protocol implementation and client libraries" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, - {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, + {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, + {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, ] [package.dependencies] @@ -1698,7 +1749,7 @@ traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", 
"paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-console" @@ -1727,14 +1778,14 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.7.1" +version = "5.7.2" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, - {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, ] [package.dependencies] @@ -1744,22 +1795,23 @@ traitlets = ">=5.3" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" -version = "0.9.0" +version = "0.12.0" description = "Jupyter Event System library" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "jupyter_events-0.9.0-py3-none-any.whl", hash = "sha256:d853b3c10273ff9bc8bb8b30076d65e2c9685579db736873de6c2232dde148bf"}, - {file = "jupyter_events-0.9.0.tar.gz", hash = "sha256:81ad2e4bc710881ec274d31c6c50669d71bbaa5dd9d01e600b56faa85700d399"}, + {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, + {file = "jupyter_events-0.12.0.tar.gz", hash = 
"sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, ] [package.dependencies] jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +packaging = "*" python-json-logger = ">=2.0.4" pyyaml = ">=5.3" referencing = "*" @@ -1769,19 +1821,19 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8)", "sphinxcontrib-spelling"] test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] [[package]] name = "jupyter-lsp" -version = "2.2.2" +version = "2.2.5" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter-lsp-2.2.2.tar.gz", hash = "sha256:256d24620542ae4bba04a50fc1f6ffe208093a07d8e697fea0a8d1b8ca1b7e5b"}, - {file = "jupyter_lsp-2.2.2-py3-none-any.whl", hash = "sha256:3b95229e4168355a8c91928057c1621ac3510ba98b2a925e82ebd77f078b1aa5"}, + {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, + {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, ] [package.dependencies] @@ -1789,51 +1841,51 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.12.5" +version = "2.15.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "jupyter_server-2.12.5-py3-none-any.whl", hash = "sha256:184a0f82809a8522777cfb6b760ab6f4b1bb398664c5860a27cec696cb884923"}, - {file = "jupyter_server-2.12.5.tar.gz", hash = "sha256:0edb626c94baa22809be1323f9770cf1c00a952b17097592e40d03e6a3951689"}, + {file = "jupyter_server-2.15.0-py3-none-any.whl", hash = "sha256:872d989becf83517012ee669f09604aa4a28097c0bd90b2f424310156c2cdae3"}, + {file = "jupyter_server-2.15.0.tar.gz", hash = "sha256:9d446b8697b4f7337a1b7cdcac40778babdd93ba614b6d68ab1c0c918f1c4084"}, ] [package.dependencies] anyio = ">=3.1.0" -argon2-cffi = "*" -jinja2 = "*" +argon2-cffi = ">=21.1" +jinja2 = ">=3.0.3" jupyter-client = ">=7.4.4" jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" -jupyter-events = ">=0.9.0" -jupyter-server-terminals = "*" +jupyter-events = ">=0.11.0" +jupyter-server-terminals = ">=0.4.4" nbconvert = ">=6.4.4" nbformat = ">=5.3.0" -overrides = "*" -packaging = "*" -prometheus-client = "*" -pywinpty = {version = "*", markers = "os_name == \"nt\""} +overrides = ">=5.0" +packaging = ">=22.0" +prometheus-client = ">=0.9" +pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} pyzmq = ">=24" send2trash = ">=1.8.2" terminado = ">=0.8.3" tornado = ">=6.2.0" traitlets = ">=5.6.0" -websocket-client = "*" +websocket-client = ">=1.7" [package.extras] -docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] -test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.4)", "pytest-timeout", "requests"] +docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", 
"send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] [[package]] name = "jupyter-server-terminals" -version = "0.5.2" +version = "0.5.3" description = "A Jupyter Server Extension Providing Terminals." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_server_terminals-0.5.2-py3-none-any.whl", hash = "sha256:1b80c12765da979513c42c90215481bbc39bd8ae7c0350b4f85bc3eb58d0fa80"}, - {file = "jupyter_server_terminals-0.5.2.tar.gz", hash = "sha256:396b5ccc0881e550bf0ee7012c6ef1b53edbde69e67cab1d56e89711b46052e8"}, + {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, + {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, ] [package.dependencies] @@ -1846,36 +1898,38 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.1.1" +version = "4.3.5" description = "JupyterLab computational environment" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.1.1-py3-none-any.whl", hash = "sha256:fa3e8c18b804eac04e51ceebd9dd3dd396e08106816f0d09cc426799d7087632"}, - {file = "jupyterlab-4.1.1.tar.gz", hash = "sha256:8acc9f561729d8f32c14c294c397917cddfeeb13a5d46f811979b71b4911a9fd"}, + {file = "jupyterlab-4.3.5-py3-none-any.whl", hash = "sha256:571bbdee20e4c5321ab5195bc41cf92a75a5cff886be5e57ce78dfa37a5e9fdb"}, + {file = "jupyterlab-4.3.5.tar.gz", hash = "sha256:c779bf72ced007d7d29d5bcef128e7fdda96ea69299e19b04a43635a7d641f9d"}, ] [package.dependencies] async-lru = ">=1.0.0" httpx = ">=0.25.0" 
-ipykernel = "*" +ipykernel = ">=6.5.0" jinja2 = ">=3.0.3" jupyter-core = "*" jupyter-lsp = ">=2.0.0" jupyter-server = ">=2.4.0,<3" -jupyterlab-server = ">=2.19.0,<3" +jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2" packaging = "*" -tomli = {version = "*", markers = "python_version < \"3.11\""} +setuptools = ">=40.8.0" +tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.2.0)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.2.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.1)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post6)", "matplotlib (==3.8.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.0)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.6.9)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<8.1.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.4.1)", "ipython (==8.16.1)", "ipywidgets (==8.1.5)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.2.post3)", "matplotlib (==3.9.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.3)", "scipy (==1.14.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] +upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] [[package]] name = "jupyterlab-pygments" @@ -1891,14 +1945,14 @@ files = [ [[package]] name = 
"jupyterlab-server" -version = "2.25.3" +version = "2.27.3" description = "A set of server components for JupyterLab and JupyterLab like applications." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab_server-2.25.3-py3-none-any.whl", hash = "sha256:c48862519fded9b418c71645d85a49b2f0ec50d032ba8316738e9276046088c1"}, - {file = "jupyterlab_server-2.25.3.tar.gz", hash = "sha256:846f125a8a19656611df5b03e5912c8393cea6900859baa64fa515eb64a8dc40"}, + {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, + {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, ] [package.dependencies] @@ -1913,18 +1967,18 @@ requests = ">=2.31" [package.extras] docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] -test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] [[package]] name = "jupyterlab-widgets" -version = "3.0.10" +version = "3.0.13" description = "Jupyter interactive widgets for JupyterLab" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = 
"sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, - {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, + {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, + {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, ] [[package]] @@ -1976,14 +2030,14 @@ files = [ [[package]] name = "mako" -version = "1.3.2" +version = "1.3.9" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, - {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, + {file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"}, + {file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"}, ] [package.dependencies] @@ -2011,84 +2065,85 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "matplotlib-inline" -version = "0.1.6" +version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = 
"sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, ] [package.dependencies] @@ -2120,28 +2175,31 @@ files = [ [[package]] name = "minilog" -version = "2.3" +version = "2.3.1" description = "Minimalistic wrapper for Python logging." category = "main" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "minilog-2.3-py3-none-any.whl", hash = "sha256:e42dc2def1da424e90d6664279c128dde94adc5840557b27857957ed23ee09b6"}, - {file = "minilog-2.3.tar.gz", hash = "sha256:ebdf354f1dd86a2e8a824cdde4b7b50cdbc24b99a5465bc4d1806bd1e030bc92"}, + {file = "minilog-2.3.1-py3-none-any.whl", hash = "sha256:1a679fefe6140ce1d59c3246adc991f9eb480169e5a6c54d2be9023ee459dc30"}, + {file = "minilog-2.3.1.tar.gz", hash = "sha256:4b602572c3bcdd2d8f00d879f635c0de9e632d5d0307e131c91074be8acf444e"}, ] [[package]] name = "mistune" -version = "3.0.2" +version = "3.1.1" description = "A sane and fast Markdown parser with useful plugins and renderers" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, + {file = "mistune-3.1.1-py3-none-any.whl", hash = "sha256:02106ac2aa4f66e769debbfa028509a275069dcffce0dfa578edd7b991ee700a"}, + {file = "mistune-3.1.1.tar.gz", hash = "sha256:e0740d635f515119f7d1feb6f9b192ee60f0cc649f80a8f944f905706a21654c"}, ] +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.11\""} + [[package]] name = "mkdocs" version = "1.3.1" @@ -2171,48 +2229,60 @@ i18n = ["babel (>=2.9.0)"] [[package]] name = "mypy" -version = "1.8.0" +version = "1.14.1" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = 
"mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, + {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, + {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, + {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, + {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, + {file = 
"mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, + {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, + {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, + {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, + {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, + {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, + {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, + {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, + {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, + {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, + {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" +mypy_extensions = ">=1.0.0" tomli = 
{version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -2231,14 +2301,14 @@ files = [ [[package]] name = "nbclient" -version = "0.9.0" +version = "0.10.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "dev" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" files = [ - {file = "nbclient-0.9.0-py3-none-any.whl", hash = "sha256:a3a1ddfb34d4a9d17fc744d655962714a866639acd30130e9be84191cd97cd15"}, - {file = "nbclient-0.9.0.tar.gz", hash = "sha256:4b28c207877cf33ef3a9838cdc7a54c5ceff981194a82eac59d558f05487295e"}, + {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, + {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, ] [package.dependencies] @@ -2249,24 +2319,24 @@ traitlets = ">=5.4" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] +docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" -version = 
"7.16.0" -description = "Converting Jupyter Notebooks" +version = "7.16.6" +description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nbconvert-7.16.0-py3-none-any.whl", hash = "sha256:ad3dc865ea6e2768d31b7eb6c7ab3be014927216a5ece3ef276748dd809054c7"}, - {file = "nbconvert-7.16.0.tar.gz", hash = "sha256:813e6553796362489ae572e39ba1bff978536192fb518e10826b0e8cadf03ec8"}, + {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, + {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, ] [package.dependencies] beautifulsoup4 = "*" -bleach = "!=5.0.0" +bleach = {version = "!=5.0.0", extras = ["css"]} defusedxml = "*" jinja2 = ">=3.0" jupyter-core = ">=4.7" @@ -2278,34 +2348,33 @@ nbformat = ">=5.7" packaging = "*" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" -tinycss2 = "*" traitlets = ">=5.1" [package.extras] -all = ["nbconvert[docs,qtpdf,serve,test,webpdf]"] +all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] -qtpdf = ["nbconvert[qtpng]"] +qtpdf = ["pyqtwebengine (>=5.15)"] qtpng = ["pyqtwebengine (>=5.15)"] serve = ["tornado (>=6.1)"] -test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] webpdf = ["playwright"] [[package]] 
name = "nbformat" -version = "5.9.2" +version = "5.10.4" description = "The Jupyter Notebook format" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "nbformat-5.9.2-py3-none-any.whl", hash = "sha256:1c5172d786a41b82bcfd0c23f9e6b6f072e8fb49c39250219e4acfff1efe89e9"}, - {file = "nbformat-5.9.2.tar.gz", hash = "sha256:5f98b5ba1997dff175e77e0c17d5c10a96eaed2cbd1de3533d1fc35d5e111192"}, + {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, + {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, ] [package.dependencies] -fastjsonschema = "*" +fastjsonschema = ">=2.15" jsonschema = ">=2.6" -jupyter-core = "*" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" traitlets = ">=5.1" [package.extras] @@ -2326,27 +2395,27 @@ files = [ [[package]] name = "notebook" -version = "7.1.0" +version = "7.3.2" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.1.0-py3-none-any.whl", hash = "sha256:a8fa4ccb5e5fe220f29d9900337efd7752bc6f2efe004d6f320db01f7743adc9"}, - {file = "notebook-7.1.0.tar.gz", hash = "sha256:99caf01ff166b1cc86355c9b37c1ba9bf566c1d7fc4ab57bb6f8f24e36c4260e"}, + {file = "notebook-7.3.2-py3-none-any.whl", hash = "sha256:e5f85fc59b69d3618d73cf27544418193ff8e8058d5bf61d315ce4f473556288"}, + {file = "notebook-7.3.2.tar.gz", hash = "sha256:705e83a1785f45b383bf3ee13cb76680b92d24f56fb0c7d2136fe1d850cd3ca8"}, ] [package.dependencies] jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.1.1,<4.2" -jupyterlab-server = ">=2.22.1,<3" +jupyterlab = ">=4.3.4,<4.4" +jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2,<0.3" tornado = ">=6.2.0" [package.extras] dev = ["hatch", "pre-commit"] docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", 
"sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] [[package]] name = "notebook-shim" @@ -2426,60 +2495,73 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pandas" -version = "2.2.0" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, - {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, - {file = 
"pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, - {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, - {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, - {file = 
"pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, - {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, - {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, - {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] numpy = [ - {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -2504,6 +2586,7 @@ parquet = ["pyarrow (>=10.0.1)"] performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] plot = ["matplotlib (>=3.6.3)"] postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] spss = ["pyreadstat (>=1.2.0)"] sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] @@ -2523,14 +2606,14 @@ files = [ [[package]] name = "paramiko" -version = "3.4.0" +version = "3.5.1" description = "SSH2 protocol library" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, - {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, + {file = "paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61"}, + {file = "paramiko-3.5.1.tar.gz", hash = 
"sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822"}, ] [package.dependencies] @@ -2545,19 +2628,19 @@ invoke = ["invoke (>=2.0)"] [[package]] name = "parso" -version = "0.8.3" +version = "0.8.4" description = "A Python Parser" category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, ] [package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] [[package]] name = "pastel" @@ -2600,30 +2683,31 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -2651,14 +2735,14 @@ poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "prometheus-client" -version = "0.20.0" +version = "0.21.1" description = "Python client for the Prometheus monitoring system." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, + {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, + {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, ] [package.extras] @@ -2666,14 +2750,14 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.50" description = "Library for building powerful interactive command lines in Python" category = "dev" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, + {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, ] [package.dependencies] @@ -2681,52 +2765,53 @@ wcwidth = "*" [[package]] name = "psutil" -version = "5.9.8" +version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = 
"sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"}, + {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"}, + {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"}, + {file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"}, + {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"}, + {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"}, + {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"}, + {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "psycopg2" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, - {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, - {file = 
"psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, - {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, - {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, - {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, - {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, - {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, - {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, - {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, + {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, + {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, + {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, + {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, + {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, + {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, + 
{file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, + {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, + {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, + {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, ] [[package]] @@ -2743,14 +2828,14 @@ files = [ [[package]] name = "pure-eval" -version = "0.2.2" +version = "0.2.3" description = "Safely evaluate AST nodes without side effects" category = "dev" optional = false python-versions = "*" files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] [package.extras] @@ -2789,14 +2874,14 @@ files = [ [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = 
"sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] @@ -2854,36 +2939,35 @@ files = [ [[package]] name = "pygments" -version = "2.17.2" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] -plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.10.1" description = "JSON Web Token implementation in Python" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, ] [package.extras] crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +dev = ["coverage[toml] (==5.0.4)", 
"cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] @@ -3046,14 +3130,14 @@ pytest = ">=2.2.3" [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -3076,126 +3160,137 @@ cli = ["click (>=5.0)"] [[package]] name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" +version = "3.2.1" +description = "JSON Log Formatter for the Python Logging Package" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, + {file = "python_json_logger-3.2.1-py3-none-any.whl", hash = "sha256:cdc17047eb5374bd311e748b42f99d71223f3b0e186f4206cc5d52aefe85b090"}, + {file = "python_json_logger-3.2.1.tar.gz", hash = "sha256:8eb0554ea17cb75b05d2848bc14fb02fbdbd9d6972120781b974380bfa162008"}, ] 
+[package.extras] +dev = ["backports.zoneinfo", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec", "msgspec-python313-pre", "mypy", "orjson", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] + [[package]] name = "pytz" -version = "2024.1" +version = "2025.1" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] name = "pywin32" -version = "306" +version = "308" description = "Python for Window Extensions" category = "dev" optional = false python-versions = "*" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = 
"sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = 
"sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] [[package]] name = "pywinpty" -version = "2.0.12" +version = "2.0.15" description = "Pseudo terminal support for Windows from Python." 
category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pywinpty-2.0.12-cp310-none-win_amd64.whl", hash = "sha256:21319cd1d7c8844fb2c970fb3a55a3db5543f112ff9cfcd623746b9c47501575"}, - {file = "pywinpty-2.0.12-cp311-none-win_amd64.whl", hash = "sha256:853985a8f48f4731a716653170cd735da36ffbdc79dcb4c7b7140bce11d8c722"}, - {file = "pywinpty-2.0.12-cp312-none-win_amd64.whl", hash = "sha256:1617b729999eb6713590e17665052b1a6ae0ad76ee31e60b444147c5b6a35dca"}, - {file = "pywinpty-2.0.12-cp38-none-win_amd64.whl", hash = "sha256:189380469ca143d06e19e19ff3fba0fcefe8b4a8cc942140a6b863aed7eebb2d"}, - {file = "pywinpty-2.0.12-cp39-none-win_amd64.whl", hash = "sha256:7520575b6546db23e693cbd865db2764097bd6d4ef5dc18c92555904cd62c3d4"}, - {file = "pywinpty-2.0.12.tar.gz", hash = "sha256:8197de460ae8ebb7f5d1701dfa1b5df45b157bb832e92acba316305e18ca00dd"}, + {file = "pywinpty-2.0.15-cp310-cp310-win_amd64.whl", hash = "sha256:8e7f5de756a615a38b96cd86fa3cd65f901ce54ce147a3179c45907fa11b4c4e"}, + {file = "pywinpty-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:9a6bcec2df2707aaa9d08b86071970ee32c5026e10bcc3cc5f6f391d85baf7ca"}, + {file = "pywinpty-2.0.15-cp312-cp312-win_amd64.whl", hash = "sha256:83a8f20b430bbc5d8957249f875341a60219a4e971580f2ba694fbfb54a45ebc"}, + {file = "pywinpty-2.0.15-cp313-cp313-win_amd64.whl", hash = "sha256:ab5920877dd632c124b4ed17bc6dd6ef3b9f86cd492b963ffdb1a67b85b0f408"}, + {file = "pywinpty-2.0.15-cp313-cp313t-win_amd64.whl", hash = "sha256:a4560ad8c01e537708d2790dbe7da7d986791de805d89dd0d3697ca59e9e4901"}, + {file = "pywinpty-2.0.15-cp39-cp39-win_amd64.whl", hash = "sha256:d261cd88fcd358cfb48a7ca0700db3e1c088c9c10403c9ebc0d8a8b57aa6a117"}, + {file = "pywinpty-2.0.15.tar.gz", hash = "sha256:312cf39153a8736c617d45ce8b6ad6cd2107de121df91c455b10ce6bba7a39b2"}, ] [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" category = "main" optional = false 
-python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, 
- {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = 
"PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = 
"PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -3215,199 +3310,172 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "25.1.2" +version = "26.2.1" description = "Python bindings for 0MQ" category = "dev" optional = false -python-versions = ">=3.6" -files = [ - {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, - {file = 
"pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, - {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, - {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, - {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, - {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, - {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, - {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, - {file = 
"pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, - {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, - {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, - {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, - {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, - {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, - {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, - {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, - {file = 
"pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, - {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, - {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, - {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, - {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, - {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, - {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, - {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = 
"sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, - {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, - {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, - {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, - {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, - {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, - {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, - {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, - {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, - 
{file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, - {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, - {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, - {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, - {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, - {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, - {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, - {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, - {file = 
"pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, - {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, - {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, - {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, - {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, - {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, - {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, - {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, - {file = 
"pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, -] - -[package.dependencies] -cffi = {version = "*", markers = "implementation_name == \"pypy\""} - -[[package]] -name = "qtconsole" -version = "5.5.1" -description = "Jupyter Qt console" -category = "dev" -optional = false -python-versions = ">= 3.8" -files = [ - {file = "qtconsole-5.5.1-py3-none-any.whl", hash = "sha256:8c75fa3e9b4ed884880ff7cea90a1b67451219279ec33deaee1d59e3df1a5d2b"}, - {file = "qtconsole-5.5.1.tar.gz", hash = "sha256:a0e806c6951db9490628e4df80caec9669b65149c7ba40f9bf033c025a5b56bc"}, -] - -[package.dependencies] -ipykernel = ">=4.1" -jupyter-client = ">=4.1" -jupyter-core = "*" -packaging = "*" -pygments = "*" -pyzmq = ">=17.1" -qtpy = ">=2.4.0" -traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" - -[package.extras] -doc = ["Sphinx (>=1.3)"] -test = ["flaky", "pytest", "pytest-qt"] - -[[package]] -name = "qtpy" -version = "2.4.1" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
-category = "dev" -optional = false python-versions = ">=3.7" files = [ - {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, - {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, + {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, + {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95f5728b367a042df146cec4340d75359ec6237beebf4a8f5cf74657c65b9257"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f7b01b3f275504011cf4cf21c6b885c8d627ce0867a7e83af1382ebab7b3ff"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a00370a2ef2159c310e662c7c0f2d030f437f35f478bb8b2f70abd07e26b24"}, + {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8531ed35dfd1dd2af95f5d02afd6545e8650eedbf8c3d244a554cf47d8924459"}, + {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cdb69710e462a38e6039cf17259d328f86383a06c20482cc154327968712273c"}, + {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e7eeaef81530d0b74ad0d29eec9997f1c9230c2f27242b8d17e0ee67662c8f6e"}, + {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:361edfa350e3be1f987e592e834594422338d7174364763b7d3de5b0995b16f3"}, + {file = "pyzmq-26.2.1-cp310-cp310-win32.whl", hash = "sha256:637536c07d2fb6a354988b2dd1d00d02eb5dd443f4bbee021ba30881af1c28aa"}, + {file = "pyzmq-26.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:45fad32448fd214fbe60030aa92f97e64a7140b624290834cc9b27b3a11f9473"}, + {file = 
"pyzmq-26.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:d9da0289d8201c8a29fd158aaa0dfe2f2e14a181fd45e2dc1fbf969a62c1d594"}, + {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:c059883840e634a21c5b31d9b9a0e2b48f991b94d60a811092bc37992715146a"}, + {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed038a921df836d2f538e509a59cb638df3e70ca0fcd70d0bf389dfcdf784d2a"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9027a7fcf690f1a3635dc9e55e38a0d6602dbbc0548935d08d46d2e7ec91f454"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d75fcb00a1537f8b0c0bb05322bc7e35966148ffc3e0362f0369e44a4a1de99"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0019cc804ac667fb8c8eaecdb66e6d4a68acf2e155d5c7d6381a5645bd93ae4"}, + {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f19dae58b616ac56b96f2e2290f2d18730a898a171f447f491cc059b073ca1fa"}, + {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f5eeeb82feec1fc5cbafa5ee9022e87ffdb3a8c48afa035b356fcd20fc7f533f"}, + {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:000760e374d6f9d1a3478a42ed0c98604de68c9e94507e5452951e598ebecfba"}, + {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:817fcd3344d2a0b28622722b98500ae9c8bfee0f825b8450932ff19c0b15bebd"}, + {file = "pyzmq-26.2.1-cp311-cp311-win32.whl", hash = "sha256:88812b3b257f80444a986b3596e5ea5c4d4ed4276d2b85c153a6fbc5ca457ae7"}, + {file = "pyzmq-26.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ef29630fde6022471d287c15c0a2484aba188adbfb978702624ba7a54ddfa6c1"}, + {file = "pyzmq-26.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:f32718ee37c07932cc336096dc7403525301fd626349b6eff8470fe0f996d8d7"}, + {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = 
"sha256:a6549ecb0041dafa55b5932dcbb6c68293e0bd5980b5b99f5ebb05f9a3b8a8f3"}, + {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0250c94561f388db51fd0213cdccbd0b9ef50fd3c57ce1ac937bf3034d92d72e"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ee4297d9e4b34b5dc1dd7ab5d5ea2cbba8511517ef44104d2915a917a56dc8"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2a9cb17fd83b7a3a3009901aca828feaf20aa2451a8a487b035455a86549c09"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786dd8a81b969c2081b31b17b326d3a499ddd1856e06d6d79ad41011a25148da"}, + {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2d88ba221a07fc2c5581565f1d0fe8038c15711ae79b80d9462e080a1ac30435"}, + {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c84c1297ff9f1cd2440da4d57237cb74be21fdfe7d01a10810acba04e79371a"}, + {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46d4ebafc27081a7f73a0f151d0c38d4291656aa134344ec1f3d0199ebfbb6d4"}, + {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:91e2bfb8e9a29f709d51b208dd5f441dc98eb412c8fe75c24ea464734ccdb48e"}, + {file = "pyzmq-26.2.1-cp312-cp312-win32.whl", hash = "sha256:4a98898fdce380c51cc3e38ebc9aa33ae1e078193f4dc641c047f88b8c690c9a"}, + {file = "pyzmq-26.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0741edbd0adfe5f30bba6c5223b78c131b5aa4a00a223d631e5ef36e26e6d13"}, + {file = "pyzmq-26.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:e5e33b1491555843ba98d5209439500556ef55b6ab635f3a01148545498355e5"}, + {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:099b56ef464bc355b14381f13355542e452619abb4c1e57a534b15a106bf8e23"}, + {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = 
"sha256:651726f37fcbce9f8dd2a6dab0f024807929780621890a4dc0c75432636871be"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57dd4d91b38fa4348e237a9388b4423b24ce9c1695bbd4ba5a3eada491e09399"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d51a7bfe01a48e1064131f3416a5439872c533d756396be2b39e3977b41430f9"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7154d228502e18f30f150b7ce94f0789d6b689f75261b623f0fdc1eec642aab"}, + {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:f1f31661a80cc46aba381bed475a9135b213ba23ca7ff6797251af31510920ce"}, + {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:290c96f479504439b6129a94cefd67a174b68ace8a8e3f551b2239a64cfa131a"}, + {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f2c307fbe86e18ab3c885b7e01de942145f539165c3360e2af0f094dd440acd9"}, + {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b314268e716487bfb86fcd6f84ebbe3e5bec5fac75fdf42bc7d90fdb33f618ad"}, + {file = "pyzmq-26.2.1-cp313-cp313-win32.whl", hash = "sha256:edb550616f567cd5603b53bb52a5f842c0171b78852e6fc7e392b02c2a1504bb"}, + {file = "pyzmq-26.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:100a826a029c8ef3d77a1d4c97cbd6e867057b5806a7276f2bac1179f893d3bf"}, + {file = "pyzmq-26.2.1-cp313-cp313-win_arm64.whl", hash = "sha256:6991ee6c43e0480deb1b45d0c7c2bac124a6540cba7db4c36345e8e092da47ce"}, + {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:25e720dba5b3a3bb2ad0ad5d33440babd1b03438a7a5220511d0c8fa677e102e"}, + {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:9ec6abfb701437142ce9544bd6a236addaf803a32628d2260eb3dbd9a60e2891"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2e1eb9d2bfdf5b4e21165b553a81b2c3bd5be06eeddcc4e08e9692156d21f1f6"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90dc731d8e3e91bcd456aa7407d2eba7ac6f7860e89f3766baabb521f2c1de4a"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6a93d684278ad865fc0b9e89fe33f6ea72d36da0e842143891278ff7fd89c3"}, + {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c1bb37849e2294d519117dd99b613c5177934e5c04a5bb05dd573fa42026567e"}, + {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:632a09c6d8af17b678d84df442e9c3ad8e4949c109e48a72f805b22506c4afa7"}, + {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:fc409c18884eaf9ddde516d53af4f2db64a8bc7d81b1a0c274b8aa4e929958e8"}, + {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:17f88622b848805d3f6427ce1ad5a2aa3cf61f12a97e684dab2979802024d460"}, + {file = "pyzmq-26.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ef584f13820d2629326fe20cc04069c21c5557d84c26e277cfa6235e523b10f"}, + {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:160194d1034902937359c26ccfa4e276abffc94937e73add99d9471e9f555dd6"}, + {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:574b285150afdbf0a0424dddf7ef9a0d183988eb8d22feacb7160f7515e032cb"}, + {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44dba28c34ce527cf687156c81f82bf1e51f047838d5964f6840fd87dfecf9fe"}, + {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9fbdb90b85c7624c304f72ec7854659a3bd901e1c0ffb2363163779181edeb68"}, + {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a7ad34a2921e8f76716dc7205c9bf46a53817e22b9eec2e8a3e08ee4f4a72468"}, + {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:866c12b7c90dd3a86983df7855c6f12f9407c8684db6aa3890fc8027462bda82"}, + {file = "pyzmq-26.2.1-cp37-cp37m-win32.whl", hash = "sha256:eeb37f65350d5c5870517f02f8bbb2ac0fbec7b416c0f4875219fef305a89a45"}, + {file = "pyzmq-26.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4eb3197f694dfb0ee6af29ef14a35f30ae94ff67c02076eef8125e2d98963cd0"}, + {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:36d4e7307db7c847fe37413f333027d31c11d5e6b3bacbb5022661ac635942ba"}, + {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1c6ae0e95d0a4b0cfe30f648a18e764352d5415279bdf34424decb33e79935b8"}, + {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5b4fc44f5360784cc02392f14235049665caaf7c0fe0b04d313e763d3338e463"}, + {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:51431f6b2750eb9b9d2b2952d3cc9b15d0215e1b8f37b7a3239744d9b487325d"}, + {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbc78ae2065042de48a65f1421b8af6b76a0386bb487b41955818c3c1ce7bed"}, + {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d14f50d61a89b0925e4d97a0beba6053eb98c426c5815d949a43544f05a0c7ec"}, + {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:004837cb958988c75d8042f5dac19a881f3d9b3b75b2f574055e22573745f841"}, + {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b2007f28ce1b8acebdf4812c1aab997a22e57d6a73b5f318b708ef9bcabbe95"}, + {file = "pyzmq-26.2.1-cp38-cp38-win32.whl", hash = "sha256:269c14904da971cb5f013100d1aaedb27c0a246728c341d5d61ddd03f463f2f3"}, + {file = "pyzmq-26.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:31fff709fef3b991cfe7189d2cfe0c413a1d0e82800a182cfa0c2e3668cd450f"}, + {file = "pyzmq-26.2.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a4bffcadfd40660f26d1b3315a6029fd4f8f5bf31a74160b151f5c577b2dc81b"}, + {file = 
"pyzmq-26.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e76ad4729c2f1cf74b6eb1bdd05f6aba6175999340bd51e6caee49a435a13bf5"}, + {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8b0f5bab40a16e708e78a0c6ee2425d27e1a5d8135c7a203b4e977cee37eb4aa"}, + {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8e47050412f0ad3a9b2287779758073cbf10e460d9f345002d4779e43bb0136"}, + {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f18ce33f422d119b13c1363ed4cce245b342b2c5cbbb76753eabf6aa6f69c7d"}, + {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ceb0d78b7ef106708a7e2c2914afe68efffc0051dc6a731b0dbacd8b4aee6d68"}, + {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ebdd96bd637fd426d60e86a29ec14b8c1ab64b8d972f6a020baf08a30d1cf46"}, + {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03719e424150c6395b9513f53a5faadcc1ce4b92abdf68987f55900462ac7eec"}, + {file = "pyzmq-26.2.1-cp39-cp39-win32.whl", hash = "sha256:ef5479fac31df4b304e96400fc67ff08231873ee3537544aa08c30f9d22fce38"}, + {file = "pyzmq-26.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:f92a002462154c176dac63a8f1f6582ab56eb394ef4914d65a9417f5d9fde218"}, + {file = "pyzmq-26.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:1fd4b3efc6f62199886440d5e27dd3ccbcb98dfddf330e7396f1ff421bfbb3c2"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:380816d298aed32b1a97b4973a4865ef3be402a2e760204509b52b6de79d755d"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cbb368fd0debdbeb6ba5966aa28e9a1ae3396c7386d15569a6ca4be4572b99"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf7b5942c6b0dafcc2823ddd9154f419147e24f8df5b41ca8ea40a6db90615c"}, + {file = 
"pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fe6e28a8856aea808715f7a4fc11f682b9d29cac5d6262dd8fe4f98edc12d53"}, + {file = "pyzmq-26.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd8fdee945b877aa3bffc6a5a8816deb048dab0544f9df3731ecd0e54d8c84c9"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ee7152f32c88e0e1b5b17beb9f0e2b14454235795ef68c0c120b6d3d23d12833"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:baa1da72aecf6a490b51fba7a51f1ce298a1e0e86d0daef8265c8f8f9848eb77"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:49135bb327fca159262d8fd14aa1f4a919fe071b04ed08db4c7c37d2f0647162"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bacc1a10c150d58e8a9ee2b2037a70f8d903107e0f0b6e079bf494f2d09c091"}, + {file = "pyzmq-26.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:09dac387ce62d69bec3f06d51610ca1d660e7849eb45f68e38e7f5cf1f49cbcb"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:70b3a46ecd9296e725ccafc17d732bfc3cdab850b54bd913f843a0a54dfb2c04"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:59660e15c797a3b7a571c39f8e0b62a1f385f98ae277dfe95ca7eaf05b5a0f12"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0f50db737d688e96ad2a083ad2b453e22865e7e19c7f17d17df416e91ddf67eb"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a003200b6cd64e89b5725ff7e284a93ab24fd54bbac8b4fa46b1ed57be693c27"}, + {file = "pyzmq-26.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f9ba5def063243793dec6603ad1392f735255cbc7202a3a484c14f99ec290705"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:1238c2448c58b9c8d6565579393148414a42488a5f916b3f322742e561f6ae0d"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eddb3784aed95d07065bcf94d07e8c04024fdb6b2386f08c197dfe6b3528fda"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0f19c2097fffb1d5b07893d75c9ee693e9cbc809235cf3f2267f0ef6b015f24"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0995fd3530f2e89d6b69a2202e340bbada3191014352af978fa795cb7a446331"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7c6160fe513654e65665332740f63de29ce0d165e053c0c14a161fa60dd0da01"}, + {file = "pyzmq-26.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8ec8e3aea6146b761d6c57fcf8f81fcb19f187afecc19bf1701a48db9617a217"}, + {file = "pyzmq-26.2.1.tar.gz", hash = "sha256:17d72a74e5e9ff3829deb72897a175333d3ef5b5413948cae3cf7ebf0b02ecca"}, ] [package.dependencies] -packaging = "*" - -[package.extras] -test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "redis" -version = "5.0.1" +version = "5.2.1" description = "Python client for Redis database and key-value store" category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, - {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] [package.dependencies] -async-timeout = {version = ">=4.0.2", markers = 
"python_full_version <= \"3.11.2\""} +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} [package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "referencing" -version = "0.33.0" +version = "0.36.2" description = "JSON Referencing + Python" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "referencing-0.33.0-py3-none-any.whl", hash = "sha256:39240f2ecc770258f28b642dd47fd74bc8b02484de54e1882b74b35ebd779bd5"}, - {file = "referencing-0.33.0.tar.gz", hash = "sha256:c775fedf74bc0f9189c2a3be1c12fd03e8c23f4d371dce795df44e06c5b412f7"}, + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, ] [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." 
category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -3449,111 +3517,115 @@ files = [ [[package]] name = "rpds-py" -version = "0.18.0" +version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, - {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, - {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, - {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, - {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, - {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, - {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, - {file = 
"rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, - {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, - {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, - {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, - {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, - {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, - {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, - {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, - {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, - {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, - {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, - {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, - {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, - {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, - {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, - {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, - {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, - {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, - {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, - {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, - {file = 
"rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, - {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, - {file = 
"rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, - {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, - {file = 
"rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, - {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, - {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = 
"rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = 
"rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = 
"rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, ] [[package]] @@ -3574,14 +3646,14 @@ doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "send2trash" -version = "1.8.2" +version = "1.8.3" description = "Send file to trash natively under Mac OS X, Windows and Linux" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "Send2Trash-1.8.2-py3-none-any.whl", hash = "sha256:a384719d99c07ce1eefd6905d2decb6f8b7ed054025bb0e618919f945de4f679"}, - {file = "Send2Trash-1.8.2.tar.gz", hash = "sha256:c132d59fa44b9ca2b1699af5c86f57ce9f4c5eb56629d5d55fbb7a35f84e2312"}, + {file = 
"Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, + {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, ] [package.extras] @@ -3589,28 +3661,49 @@ nativelib = ["pyobjc-framework-Cocoa", "pywin32"] objc = ["pyobjc-framework-Cocoa"] win32 = ["pywin32"] +[[package]] +name = "setuptools" +version = "75.8.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.9" +files = [ + {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, + {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w 
(>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.14.0,<1.15.0)", "pytest-mypy"] + [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "sniffio" -version = "1.3.0" +version = "1.3.1" description = "Sniff out which async library your code is running under" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, - {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] @@ -3627,77 +3720,85 @@ files = [ [[package]] name = "soupsieve" -version = "2.5" +version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sqlalchemy" -version = "2.0.27" +version = "2.0.37" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, - {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, - {file = 
"SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, - {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, - {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, - {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, - {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, - {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = 
"sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, - {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, - {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6f5d254a22394847245f411a2956976401e84da4288aa70cbcd5190744062c1"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41296bbcaa55ef5fdd32389a35c710133b097f7b2609d8218c0eabded43a1d84"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bedee60385c1c0411378cbd4dc486362f5ee88deceea50002772912d798bb00f"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6c67415258f9f3c69867ec02fea1bf6508153709ecbd731a982442a590f2b7e4"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-win32.whl", hash = "sha256:650dcb70739957a492ad8acff65d099a9586b9b8920e3507ca61ec3ce650bb72"}, + {file = "SQLAlchemy-2.0.37-cp310-cp310-win_amd64.whl", hash = "sha256:93d1543cd8359040c02b6614421c8e10cd7a788c40047dbc507ed46c29ae5636"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78361be6dc9073ed17ab380985d1e45e48a642313ab68ab6afa2457354ff692c"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b661b49d0cb0ab311a189b31e25576b7ac3e20783beb1e1817d72d9d02508bf5"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d57bafbab289e147d064ffbd5cca2d7b1394b63417c0636cea1f2e93d16eb9e8"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2c0913f02341d25fb858e4fb2031e6b0813494cca1ba07d417674128ce11b"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9df21b8d9e5c136ea6cde1c50d2b1c29a2b5ff2b1d610165c23ff250e0704087"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db18ff6b8c0f1917f8b20f8eca35c28bbccb9f83afa94743e03d40203ed83de9"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-win32.whl", hash = "sha256:46954173612617a99a64aee103bcd3f078901b9a8dcfc6ae80cbf34ba23df989"}, + {file = "SQLAlchemy-2.0.37-cp311-cp311-win_amd64.whl", hash = "sha256:7b7e772dc4bc507fdec4ee20182f15bd60d2a84f1e087a8accf5b5b7a0dcf2ba"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2952748ecd67ed3b56773c185e85fc084f6bdcdec10e5032a7c25a6bc7d682ef"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3151822aa1db0eb5afd65ccfafebe0ef5cda3a7701a279c8d0bf17781a793bb4"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa8039b6d20137a4e02603aba37d12cd2dde7887500b8855356682fc33933f4"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cdba1f73b64530c47b27118b7053b8447e6d6f3c8104e3ac59f3d40c33aa9fd"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1b2690456528a87234a75d1a1644cdb330a6926f455403c8e4f6cad6921f9098"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf5ae8a9dcf657fd72144a7fd01f243236ea39e7344e579a121c4205aedf07bb"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-win32.whl", hash = "sha256:ea308cec940905ba008291d93619d92edaf83232ec85fbd514dcb329f3192761"}, + {file = "SQLAlchemy-2.0.37-cp312-cp312-win_amd64.whl", hash = 
"sha256:635d8a21577341dfe4f7fa59ec394b346da12420b86624a69e466d446de16aff"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c4096727193762e72ce9437e2a86a110cf081241919ce3fab8e89c02f6b6658"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e4fb5ac86d8fe8151966814f6720996430462e633d225497566b3996966b9bdb"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e56a139bfe136a22c438478a86f8204c1eb5eed36f4e15c4224e4b9db01cb3e4"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f95fc8e3f34b5f6b3effb49d10ac97c569ec8e32f985612d9b25dd12d0d2e94"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c505edd429abdfe3643fa3b2e83efb3445a34a9dc49d5f692dd087be966020e0"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:12b0f1ec623cccf058cf21cb544f0e74656618165b083d78145cafde156ea7b6"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-win32.whl", hash = "sha256:293f9ade06b2e68dd03cfb14d49202fac47b7bb94bffcff174568c951fbc7af2"}, + {file = "SQLAlchemy-2.0.37-cp313-cp313-win_amd64.whl", hash = "sha256:d70f53a0646cc418ca4853da57cf3ddddbccb8c98406791f24426f2dd77fd0e2"}, + {file = "SQLAlchemy-2.0.37-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44f569d0b1eb82301b92b72085583277316e7367e038d97c3a1a899d9a05e342"}, + {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2eae3423e538c10d93ae3e87788c6a84658c3ed6db62e6a61bb9495b0ad16bb"}, + {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfff7be361048244c3aa0f60b5e63221c5e0f0e509f4e47b8910e22b57d10ae7"}, + {file = "SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:5bc3339db84c5fb9130ac0e2f20347ee77b5dd2596ba327ce0d399752f4fce39"}, + {file = 
"SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:84b9f23b0fa98a6a4b99d73989350a94e4a4ec476b9a7dfe9b79ba5939f5e80b"}, + {file = "SQLAlchemy-2.0.37-cp37-cp37m-win32.whl", hash = "sha256:51bc9cfef83e0ac84f86bf2b10eaccb27c5a3e66a1212bef676f5bee6ef33ebb"}, + {file = "SQLAlchemy-2.0.37-cp37-cp37m-win_amd64.whl", hash = "sha256:8e47f1af09444f87c67b4f1bb6231e12ba6d4d9f03050d7fc88df6d075231a49"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6b788f14c5bb91db7f468dcf76f8b64423660a05e57fe277d3f4fad7b9dcb7ce"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521ef85c04c33009166777c77e76c8a676e2d8528dc83a57836b63ca9c69dcd1"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75311559f5c9881a9808eadbeb20ed8d8ba3f7225bef3afed2000c2a9f4d49b9"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cce918ada64c956b62ca2c2af59b125767097ec1dca89650a6221e887521bfd7"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9d087663b7e1feabea8c578d6887d59bb00388158e8bff3a76be11aa3f748ca2"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cf95a60b36997dad99692314c4713f141b61c5b0b4cc5c3426faad570b31ca01"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-win32.whl", hash = "sha256:d75ead7dd4d255068ea0f21492ee67937bd7c90964c8f3c2bea83c7b7f81b95f"}, + {file = "SQLAlchemy-2.0.37-cp38-cp38-win_amd64.whl", hash = "sha256:74bbd1d0a9bacf34266a7907d43260c8d65d31d691bb2356f41b17c2dca5b1d0"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:648ec5acf95ad59255452ef759054f2176849662af4521db6cb245263ae4aa33"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:35bd2df269de082065d4b23ae08502a47255832cc3f17619a5cea92ce478b02b"}, + {file = 
"SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f581d365af9373a738c49e0c51e8b18e08d8a6b1b15cc556773bcd8a192fa8b"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82df02816c14f8dc9f4d74aea4cb84a92f4b0620235daa76dde002409a3fbb5a"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94b564e38b344d3e67d2e224f0aec6ba09a77e4582ced41e7bfd0f757d926ec9"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:955a2a765aa1bd81aafa69ffda179d4fe3e2a3ad462a736ae5b6f387f78bfeb8"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-win32.whl", hash = "sha256:03f0528c53ca0b67094c4764523c1451ea15959bbf0a8a8a3096900014db0278"}, + {file = "SQLAlchemy-2.0.37-cp39-cp39-win_amd64.whl", hash = "sha256:4b12885dc85a2ab2b7d00995bac6d967bffa8594123b02ed21e8eb2205a7584b"}, + {file = "SQLAlchemy-2.0.37-py3-none-any.whl", hash = "sha256:a8998bf9f8658bd3839cbc44ddbe982955641863da0c1efe5b00c1ab4f5c16b1"}, + {file = "sqlalchemy-2.0.37.tar.gz", hash = "sha256:12b28d99a9c14eaf4055810df1001557176716de0167b91026e648e65229bffb"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} typing-extensions = ">=4.6.0" [package.extras] @@ -3706,7 +3807,7 @@ aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet 
(!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] mssql-pyodbc = ["pyodbc"] @@ -3765,14 +3866,14 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "terminado" -version = "0.18.0" +version = "0.18.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "terminado-0.18.0-py3-none-any.whl", hash = "sha256:87b0d96642d0fe5f5abd7783857b9cab167f221a39ff98e3b9619a788a3c0f2e"}, - {file = "terminado-0.18.0.tar.gz", hash = "sha256:1ea08a89b835dd1b8c0c900d92848147cef2537243361b2e3f4dc15df9b6fded"}, + {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, + {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, ] [package.dependencies] @@ -3787,14 +3888,14 @@ typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] [[package]] name = "tinycss2" -version = "1.2.1" +version = "1.4.0" description = "A tiny CSS parser" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, - {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, + {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, + {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, ] [package.dependencies] @@ -3802,79 +3903,109 @@ webencodings = ">=0.4" 
[package.extras] doc = ["sphinx", "sphinx_rtd_theme"] -test = ["flake8", "isort", "pytest"] +test = ["pytest", "ruff"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tomlkit" -version = "0.12.3" +version = "0.13.2" description = "Style preserving TOML library" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, - {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = 
"tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] name = "tornado" -version = "6.4" +version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." category = "dev" optional = false -python-versions = ">= 3.8" +python-versions = ">=3.8" files = [ - {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, - {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, - {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, - {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, - {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, - {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, - {file = "tornado-6.4.tar.gz", hash = 
"sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, + {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, + {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, + {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, ] [[package]] name = "traitlets" -version = "5.14.1" +version = "5.14.3" description = "Traitlets Python configuration system" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.1-py3-none-any.whl", 
hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, - {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "types-python-dateutil" -version = "2.8.19.20240106" +version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.8.19.20240106.tar.gz", hash = "sha256:1f8db221c3b98e6ca02ea83a58371b22c374f42ae5bbdf186db9c9a76581459f"}, - {file = "types_python_dateutil-2.8.19.20240106-py3-none-any.whl", hash = "sha256:efbbdc54590d0f16152fa103c9879c7d4a00e82078f6e2cf01769042165acaa2"}, + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, ] [[package]] @@ -3906,38 +4037,38 @@ files = [ [[package]] name = "types-waitress" -version = "2.1.4.20240106" +version = "2.1.4.20240421" description = "Typing stubs for waitress" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "types-waitress-2.1.4.20240106.tar.gz", hash = "sha256:65a7240a0771032b2aa073d09f63020aa594c7d84e05b6fefe354ef6f2c47fc2"}, - {file = 
"types_waitress-2.1.4.20240106-py3-none-any.whl", hash = "sha256:0a608efb7769cff76affa2c9173e5081be95b5dc137677e43fbd826bbf333fe4"}, + {file = "types-waitress-2.1.4.20240421.tar.gz", hash = "sha256:3f961b452865979ba6a09dd3ea79bcce1cfee685a01aad03766c4f9d564651c6"}, + {file = "types_waitress-2.1.4.20240421-py3-none-any.whl", hash = "sha256:0c2d39265e096add609f4d8085f1bf1721e0a91a602a1f0a9187f3f8f3a2a328"}, ] [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "tzdata" -version = "2024.1" +version = "2025.1" description = "Provider of IANA time zone data" category = "main" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] @@ -3957,14 +4088,14 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" 
-version = "1.26.18" +version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, + {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] @@ -4007,41 +4138,42 @@ testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] [[package]] name = "watchdog" -version = "4.0.0" +version = "6.0.0" description = "Filesystem events monitoring" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, - {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, - {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, - {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, - {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, - {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, - {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, - {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, - {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, - {file = 
"watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, - {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, - {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, - {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, - {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, - {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, + {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, + {file = 
"watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, + {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, + {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, + {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, + {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, + {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, + {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, + {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, + {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, + {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, + {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, + {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, ] [package.extras] @@ -4061,20 +4193,16 @@ files = [ [[package]] name = "webcolors" -version = "1.13" +version = "24.11.1" description = "A library for working with the color formats defined by HTML and CSS." 
category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, - {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, + {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, + {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, ] -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["pytest", "pytest-cov"] - [[package]] name = "webencodings" version = "0.5.1" @@ -4089,31 +4217,31 @@ files = [ [[package]] name = "websocket-client" -version = "1.7.0" +version = "1.8.0" description = "WebSocket client for Python with low level API options" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, - {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, ] [package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] name = "werkzeug" -version = "3.0.1" +version = "3.1.3" description = "The comprehensive WSGI web application library." 
category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, - {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, ] [package.dependencies] @@ -4124,111 +4252,124 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "widgetsnbextension" -version = "4.0.10" +version = "4.0.13" description = "Jupyter interactive widgets for Jupyter Notebook" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, - {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, + {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, + {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, ] [[package]] name = "wrapt" -version = "1.16.0" +version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." 
category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = 
"wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, + {file = 
"wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, + {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, + {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, + {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, + {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", 
hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, + {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, + {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, + {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, + {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, + {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, + {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, + {file = 
"wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, + {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, + {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, + {file = 
"wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, + {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, + {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, + {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, + {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] [[package]] name = "zipp" -version = "3.17.0" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = 
"zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" diff --git a/tests/conftest.py b/tests/conftest.py index 96c63dff..b2717e01 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -307,6 +307,12 @@ def clients(flask_app): ) assert response.status_code == 200 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 5 + yield _logged_in_client, _admin_client, _editor_client, _viewer_client ctx.pop() diff --git a/tests/functional/test_user.py b/tests/functional/test_user.py index d325e713..b4202ea8 100644 --- a/tests/functional/test_user.py +++ b/tests/functional/test_user.py @@ -1,14 +1,15 @@ -"""Tests for user settings""" - +from model.db import db # ------------------- Password Change ------------------- # + + def test_post_password_change(clients): """ Given a Flask application 
configured for testing WHEN the '/auth/password/change' endpoint is requested (PUT) THEN check that the response is valid and the password is changed """ - _logged_in_client = clients[0] + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients response = _logged_in_client.post( "/auth/password/change", @@ -18,8 +19,40 @@ def test_post_password_change(clients): "old_password": "Testingyeshello11!", }, ) + a_response = _admin_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + e_response = _editor_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) + v_response = _viewer_client.post( + "/auth/password/change", + json={ + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + "old_password": "Testingyeshello11!", + }, + ) assert response.status_code == 200 + assert a_response.status_code == 200 + assert e_response.status_code == 200 + assert v_response.status_code == 200 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 0 def test_post_password_login_invalid_old_password(clients): @@ -28,7 +61,7 @@ def test_post_password_login_invalid_old_password(clients): WHEN the '/auth/login' endpoint is requested (POST) THEN check that the response is an error when old password is provided """ - _logged_in_client = clients[0] + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients response = _logged_in_client.post( "/auth/login", json={ @@ -36,8 +69,31 @@ def test_post_password_login_invalid_old_password(clients): "password": "Testingyeshello11!", }, ) - + a_response = 
_admin_client.post( + "/auth/login", + json={ + "email_address": "admin@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + e_response = _editor_client.post( + "/auth/login", + json={ + "email_address": "editor@fairhub.io", + "password": "Testingyeshello11!", + }, + ) + v_response = _viewer_client.post( + "/auth/login", + json={ + "email_address": "viewer@fairhub.io", + "password": "Testingyeshello11!", + }, + ) assert response.status_code == 401 + assert a_response.status_code == 401 + assert e_response.status_code == 401 + assert v_response.status_code == 401 def test_post_login_new_password(clients): @@ -54,5 +110,41 @@ def test_post_login_new_password(clients): "password": "Updatedpassword4testing!", }, ) - assert response.status_code == 200 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 1 + + +def test_post_logout(clients): + """ + Given a Flask application configured for testing + WHEN the '/auth/login' endpoint is requested (POST) + THEN check that the response is valid when new password is provided + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + + response = _logged_in_client.post( + "/auth/logout" + ) + a_response = _admin_client.post( + "/auth/logout" + ) + e_response = _editor_client.post( + "/auth/logout" + ) + v_response = _viewer_client.post( + "/auth/logout" + ) + + assert response.status_code == 204 + assert a_response.status_code == 204 + assert e_response.status_code == 204 + assert v_response.status_code == 204 + meta = db.metadata + for table in reversed(meta.sorted_tables): + if table.name == 'session': + session_entries = db.session.execute(table.select()).fetchall() + assert len(session_entries) == 0 From 9bfea8a1c08fbef6ce322c170c0dd7ad7ac0dab4 Mon Sep 17 00:00:00 2001 From: Aydan <62059163+Aydawka@users.noreply.github.com> Date: Wed, 19 Feb 2025 
17:41:17 -0800 Subject: [PATCH 480/505] fix: update study acronym table into short description (#60) * fix: update study acronym to short description * fix: update study acronym to short description --- .../5c1257547eb8_update_study_acronym.py | 28 +++++++++++++++++++ apis/study.py | 8 +++--- model/study.py | 6 ++-- tests/functional/test_study_api.py | 28 +++++++++---------- 4 files changed, 49 insertions(+), 21 deletions(-) create mode 100644 alembic/versions/5c1257547eb8_update_study_acronym.py diff --git a/alembic/versions/5c1257547eb8_update_study_acronym.py b/alembic/versions/5c1257547eb8_update_study_acronym.py new file mode 100644 index 00000000..9a87ec03 --- /dev/null +++ b/alembic/versions/5c1257547eb8_update_study_acronym.py @@ -0,0 +1,28 @@ +"""update_study_acronym + +Revision ID: 5c1257547eb8 +Revises: 3ffefbd9c03b +Create Date: 2025-02-19 16:25:24.597207 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = '5c1257547eb8' +down_revision: Union[str, None] = '3ffefbd9c03b' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("study") as batch_op: + batch_op.alter_column( + "acronym", + new_column_name="short_description", + type_=sa.String(300), + nullable=False + ) diff --git a/apis/study.py b/apis/study.py index 19768204..70313a0c 100644 --- a/apis/study.py +++ b/apis/study.py @@ -62,11 +62,11 @@ def post(self): # Schema validation schema = { "type": "object", - "required": ["title", "image", "acronym"], + "required": ["title", "image", "short_description"], "additionalProperties": False, "properties": { "title": {"type": "string", "minLength": 1, "maxLength": 300}, - "acronym": {"type": "string", "maxLength": 14}, + "short_description": {"type": "string", "maxLength": 300}, "image": {"type": "string"}, }, } @@ -115,12 +115,12 @@ def put(self, study_id: int): # Schema validation schema = { "type": "object", - "required": ["title", "image", "acronym"], + "required": ["title", "image", "short_description"], "additionalProperties": False, "properties": { "title": {"type": "string", "minLength": 1}, "image": {"type": "string", "minLength": 1}, - "acronym": {"type": "string", "maxLength": 14}, + "short_description": {"type": "string", "maxLength": 300}, }, } diff --git a/model/study.py b/model/study.py index d09312a1..fa7a05e4 100644 --- a/model/study.py +++ b/model/study.py @@ -34,7 +34,7 @@ def __init__(self): title = db.Column(db.String(300), nullable=False) image = db.Column(db.String, nullable=False) - acronym = db.Column(db.String(14), nullable=False) + short_description = db.Column(db.String(300), nullable=False) created_at = db.Column(db.BigInteger, nullable=False) updated_on = db.Column(db.BigInteger, nullable=False) @@ -170,7 +170,7 @@ def to_dict(self): return { "id": self.id, "title": self.title, - "acronym": self.acronym, + 
"short_description": self.short_description, "image": self.image, "created_at": self.created_at, "updated_on": self.updated_on, @@ -245,7 +245,7 @@ def update(self, data: dict): self.title = data["title"] self.image = data["image"] - self.acronym = data["acronym"] + self.short_description = data["short_description"] self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() def validate(self): diff --git a/tests/functional/test_study_api.py b/tests/functional/test_study_api.py index 0c4257fc..3e4eb71d 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_study_api.py @@ -17,7 +17,7 @@ def test_post_study(_logged_in_client): json={ "title": "Study Title", "image": "https://api.dicebear.com/6.x/adventurer/svg", - "acronym": "acronym", + "short_description": "short_description", }, ) @@ -26,7 +26,7 @@ def test_post_study(_logged_in_client): assert response_data["title"] == "Study Title" assert response_data["image"] == "https://api.dicebear.com/6.x/adventurer/svg" - assert response_data["acronym"] == "acronym" + assert response_data["short_description"] == "short_description" pytest.global_study_id = response_data @@ -94,7 +94,7 @@ def test_update_study(clients): json={ "title": "Study Title Updated", "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: ignore }, ) @@ -104,7 +104,7 @@ def test_update_study(clients): assert response_data["title"] == "Study Title Updated" assert response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert response_data["id"] == pytest.global_study_id["id"] # type: ignore admin_response = _admin_client.put( @@ -112,7 +112,7 @@ def 
test_update_study(clients): json={ "title": "Admin Study Title", "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: ignore }, ) @@ -121,7 +121,7 @@ def test_update_study(clients): pytest.global_study_id = admin_response_data assert admin_response_data["title"] == "Admin Study Title" - assert admin_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert admin_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore @@ -130,7 +130,7 @@ def test_update_study(clients): json={ "title": "Editor Study Title", "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: ignore }, ) @@ -140,7 +140,7 @@ def test_update_study(clients): assert editor_response_data["title"] == "Editor Study Title" assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert editor_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert editor_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore viewer_response = _viewer_client.put( @@ -148,7 +148,7 @@ def test_update_study(clients): json={ "title": "Viewer Study Title", "image": pytest.global_study_id["image"], # type: ignore - "acronym": pytest.global_study_id["acronym"], # type: ignore + "short_description": pytest.global_study_id["short_description"], # type: ignore }, ) @@ -185,22 +185,22 @@ def test_get_study_by_id(clients): assert 
response_data["id"] == pytest.global_study_id["id"] # type: ignore assert response_data["title"] == pytest.global_study_id["title"] # type: ignore assert response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert admin_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert admin_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert admin_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert editor_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert editor_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert editor_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert viewer_response_data["id"] == pytest.global_study_id["id"] # type: ignore assert viewer_response_data["title"] == pytest.global_study_id["title"] # type: ignore assert viewer_response_data["image"] == pytest.global_study_id["image"] # type: ignore - assert viewer_response_data["acronym"] == pytest.global_study_id["acronym"] # type: ignore + assert viewer_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore def test_delete_studies_created(clients): @@ -218,7 +218,7 @@ def test_delete_studies_created(clients): json={ "title": "Delete Me", "image": "https://api.dicebear.com/6.x/adventurer/svg", - "acronym": 
"acronym", + "short_description": "short_description", }, ) From db6b1f65b8f55c2ab0fa9d811fdc2f4235a8369c Mon Sep 17 00:00:00 2001 From: Aydan <62059163+Aydawka@users.noreply.github.com> Date: Thu, 20 Feb 2025 11:07:24 -0800 Subject: [PATCH 481/505] =?UTF-8?q?=E2=9C=A8=20feat:=20added=20email=20ver?= =?UTF-8?q?ifications=20(#34)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: added invitation templates * fix: added invitation function * fix: email invitation templates * style: 🎨 fix code style issues with Black * chore: added flask mail * fix: refactor invited_contributors table name to invite * fix: study invitation templates * fix: email verification * fix: database changes through migrations * fix: model changes for verification * fix: alembic revisions for email verification table * fix: email verification ands resend endpoint * wip: notification endpoint * style: 🎨 fix code style issues with Black * fix: alembic email verification revision * style: 🎨 fix code style issues with Black * feat: added checkings to the alembic revisions * fix: format * style: 🎨 fix code style issues with Black * fix: revisions * style: 🎨 fix code style issues with Black * fix: testing * fix: docker file * fix: model class and docker file * fix: testing constraint added * fix: testing environment added for notifications * fix: added test env to contributors * fix: deleted script from template * style: 🎨 fix code style issues with Black * fix: final revisions for testing * style: 🎨 fix code style issues with Black * fix: modify db using alembic * fix: set up config file for localhost * fix: invitation templates * style: 🎨 fix code style issues with Black * fix: email invitations * fix: added feature flag * fix: flask mailman * fix: remove auth duplication * fix: final revisions for emails * style: 🎨 fix code style issues with Black * fix: poetry lock * fix: authentication for email verification * fix: disabled alembic revisions * 
fix: removed comment * fix: upgrade destroy schema to remove alembic table * style: 🎨 fix code style issues with Black * fix: format * fix: sort testing files * fix: sort testing files * fix: additional details from staging * fix: modify template style * style: minor email sending typo * feat: add azure EmailClient system * style: format * ➕ chore: add dependency * ➕ chore: add dependency * style: template formats * fix: email confirmation direction if verification is False * fix: test issue * fix: test issue * chore: replace email names * style: format * fix: logic for sending verification * fix: add check api * fix: modify expiration * fix: authentication * fix: update alembic * fix: unverfied contributors * fix: class name * feat: email verification test * feat: set authentication for testing * fix: comment statement * fix: update poetrly lock * style: lint errors * fix: authentication * fix: alembic * fix: email verification for bypassed emails --------- Co-authored-by: Lint Action Co-authored-by: Sanjay Soundarajan --- .../0ff53a655198_update_email_verification.py | 36 ++ .../6173282aef08_remove_invite_info.py | 24 + ...2ac2b020c7c_delete_dataset_readme_table.py | 0 .../95d6e53e2578_edit_invite_table_pk.py | 38 ++ ...0e07d8924f_delete_token_fron_user_table.py | 27 + .../db1b62d02def_edit_invite_table.py | 25 + ...e9610b2cdc_role_nullable_and_fk_user_id.py | 56 ++ .../f150341d2741_modify_email_verification.py | 35 ++ ...89827ee101_user_table_email_verfication.py | 36 ++ .../fed13d793eff_email_verified_type.py | 27 + apis/__init__.py | 3 +- apis/authentication.py | 137 ++++- apis/contributor.py | 31 +- apis/dataset_metadata/dataset_rights.py | 1 - app.py | 11 +- config.py | 3 + db.py | 3 + ...2ac2b020c7c_delete_dataset_readme_table.py | 1 - invitation/invitation.py | 202 ++++++++ model/email_verification.py | 11 +- model/notification.py | 19 +- model/study.py | 4 +- model/user.py | 62 ++- poetry.lock | 482 +++++++++++------- pyproject.toml | 4 +- sql/init.sql | 
14 +- sql/init_timezones.sql | 14 +- sql/specific_tables.sql | 2 +- templates/accept_general_invitation.html | 61 +++ templates/accept_study_invitation.html | 57 +++ templates/device_notification.html | 61 +++ templates/email_verification.html | 55 ++ templates/invite_contributors.html | 67 +++ tests/conftest.py | 55 +- ...er_launch.py => test_010_server_launch.py} | 5 + ...est_study_api.py => test_020_study_api.py} | 5 + ...t_api.py => test_030_study_dataset_api.py} | 0 ...=> test_040_study_dataset_metadata_api.py} | 1 - ..._api.py => test_050_study_metadata_api.py} | 0 ...n_api.py => test_060_study_version_api.py} | 0 .../{test_user.py => test_070_user.py} | 0 41 files changed, 1434 insertions(+), 241 deletions(-) create mode 100644 alembic/versions/0ff53a655198_update_email_verification.py create mode 100644 alembic/versions_backup/6173282aef08_remove_invite_info.py rename tests/functional/__init__.py => alembic/versions_backup/72ac2b020c7c_delete_dataset_readme_table.py (100%) create mode 100644 alembic/versions_backup/95d6e53e2578_edit_invite_table_pk.py create mode 100644 alembic/versions_backup/b20e07d8924f_delete_token_fron_user_table.py create mode 100644 alembic/versions_backup/db1b62d02def_edit_invite_table.py create mode 100644 alembic/versions_backup/eee9610b2cdc_role_nullable_and_fk_user_id.py create mode 100644 alembic/versions_backup/f150341d2741_modify_email_verification.py create mode 100644 alembic/versions_backup/f189827ee101_user_table_email_verfication.py create mode 100644 alembic/versions_backup/fed13d793eff_email_verified_type.py create mode 100644 db.py create mode 100644 invitation/invitation.py create mode 100644 templates/accept_general_invitation.html create mode 100644 templates/accept_study_invitation.html create mode 100644 templates/device_notification.html create mode 100644 templates/email_verification.html create mode 100644 templates/invite_contributors.html rename tests/functional/{test_server_launch.py => 
test_010_server_launch.py} (89%) rename tests/functional/{test_study_api.py => test_020_study_api.py} (98%) rename tests/functional/{test_study_dataset_api.py => test_030_study_dataset_api.py} (100%) rename tests/functional/{test_study_dataset_metadata_api.py => test_040_study_dataset_metadata_api.py} (99%) rename tests/functional/{test_study_metadata_api.py => test_050_study_metadata_api.py} (100%) rename tests/functional/{test_study_version_api.py => test_060_study_version_api.py} (100%) rename tests/functional/{test_user.py => test_070_user.py} (100%) diff --git a/alembic/versions/0ff53a655198_update_email_verification.py b/alembic/versions/0ff53a655198_update_email_verification.py new file mode 100644 index 00000000..dc958c29 --- /dev/null +++ b/alembic/versions/0ff53a655198_update_email_verification.py @@ -0,0 +1,36 @@ +"""update_email_verification + +Revision ID: 0ff53a655198 +Revises: 3ffefbd9c03b +Create Date: 2025-02-18 13:50:48.808176 + +""" +from typing import Sequence, Union +import sqlalchemy as sa +import datetime +from alembic import op + + +# revision identifiers, used by Alembic. 
+revision: str = '0ff53a655198' +down_revision: Union[str, None] = '5c1257547eb8' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) + + +def upgrade() -> None: + op.alter_column("email_verification", "token", type_=sa.String) + op.alter_column("email_verification", "user_id", type_=sa.CHAR(36)) + + op.drop_column("email_verification", "created_at") + + op.add_column( + "email_verification", sa.Column("created_at", sa.BIGINT(), nullable=True) + ) + + op.execute(f"UPDATE \"email_verification\" SET created_at ='{created_at}'") + + op.alter_column("email_verification", "created_at", nullable=False) diff --git a/alembic/versions_backup/6173282aef08_remove_invite_info.py b/alembic/versions_backup/6173282aef08_remove_invite_info.py new file mode 100644 index 00000000..92c029a6 --- /dev/null +++ b/alembic/versions_backup/6173282aef08_remove_invite_info.py @@ -0,0 +1,24 @@ +"""remove_invite_info + +Revision ID: 6173282aef08 +Revises: f150341d2741 +Create Date: 2023-12-19 00:32:08.157538 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "6173282aef08" +down_revision: Union[str, None] = "f150341d2741" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.drop_column("invite", "info") + with op.batch_alter_table("notification") as batch_op: + batch_op.alter_column("target", nullable=True) diff --git a/tests/functional/__init__.py b/alembic/versions_backup/72ac2b020c7c_delete_dataset_readme_table.py similarity index 100% rename from tests/functional/__init__.py rename to alembic/versions_backup/72ac2b020c7c_delete_dataset_readme_table.py diff --git a/alembic/versions_backup/95d6e53e2578_edit_invite_table_pk.py b/alembic/versions_backup/95d6e53e2578_edit_invite_table_pk.py new file mode 100644 index 00000000..e19aa4d3 --- /dev/null +++ b/alembic/versions_backup/95d6e53e2578_edit_invite_table_pk.py @@ -0,0 +1,38 @@ +"""edit invite table PK + +Revision ID: 95d6e53e2578 +Revises: db1b62d02def +Create Date: 2023-11-28 14:58:43.869472 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + + +# revision identifiers, used by Alembic. 
+revision: str = "95d6e53e2578" +down_revision: Union[str, None] = "db1b62d02def" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade(): + connection = op.get_bind() + inspector = sa.inspect(connection) + + # Check if the table exists before dropping it + # if not inspector.has_table("invite"): + # if 'id' not in inspector.get_columns("invite"): + op.add_column("invite", sa.Column("id", sa.CHAR(36), nullable=True)) + op.execute( + "UPDATE invite SET id = uuid_in(overlay(overlay(md5(random()::text || ':' " + "|| random()::text) placing '4' from 13) placing to_hex(floor(random()*(11-8+1) + 8)::int)" + "::text from 17)::cstring);" + ) + op.execute("ALTER TABLE invite DROP CONSTRAINT invited_study_contributor_pkey") + + op.create_primary_key("id", "invite", ["id"]) + with op.batch_alter_table("invite") as batch_op: + batch_op.alter_column("study_id", nullable=True) diff --git a/alembic/versions_backup/b20e07d8924f_delete_token_fron_user_table.py b/alembic/versions_backup/b20e07d8924f_delete_token_fron_user_table.py new file mode 100644 index 00000000..5e07b699 --- /dev/null +++ b/alembic/versions_backup/b20e07d8924f_delete_token_fron_user_table.py @@ -0,0 +1,27 @@ +"""delete_token_fron_user_table + +Revision ID: b20e07d8924f +Revises: f189827ee101 +Create Date: 2023-12-13 13:31:38.810816 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "b20e07d8924f" +down_revision: Union[str, None] = "f189827ee101" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.drop_column("user", "token") + op.drop_column("user", "token_generated") + + +def downgrade() -> None: + pass diff --git a/alembic/versions_backup/db1b62d02def_edit_invite_table.py b/alembic/versions_backup/db1b62d02def_edit_invite_table.py new file mode 100644 index 00000000..2f9be132 --- /dev/null +++ b/alembic/versions_backup/db1b62d02def_edit_invite_table.py @@ -0,0 +1,25 @@ +"""edit invite table + +Revision ID: db1b62d02def +Revises: 72ac2b020c7c +Create Date: 2023-11-28 13:56:41.821141 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + + +revision: str = "db1b62d02def" +down_revision: Union[str, None] = "72ac2b020c7c" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.rename_table("invited_study_contributor", "invite") + op.add_column("invite", sa.Column("info", sa.String(), nullable=True)) + op.create_unique_constraint( + "study_per_user", "invite", ["study_id", "email_address"] + ) diff --git a/alembic/versions_backup/eee9610b2cdc_role_nullable_and_fk_user_id.py b/alembic/versions_backup/eee9610b2cdc_role_nullable_and_fk_user_id.py new file mode 100644 index 00000000..310bf7af --- /dev/null +++ b/alembic/versions_backup/eee9610b2cdc_role_nullable_and_fk_user_id.py @@ -0,0 +1,56 @@ +"""role_nullable_and_fk_user_id + +Revision ID: eee9610b2cdc +Revises: 95d6e53e2578 +Create Date: 2023-12-01 00:09:44.745776 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +import datetime +import uuid +from alembic import op + + +# revision identifiers, used by Alembic. 
+revision: str = "eee9610b2cdc" +down_revision: Union[str, None] = "95d6e53e2578" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + +created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) + +id = str(uuid.uuid4()) +hashed = str(uuid.uuid4()) + + +def upgrade() -> None: + op.add_column("invite", sa.Column("user_id", sa.CHAR(36))) + op.execute( + f'INSERT INTO "user" ("id", "email_address", "username", "hash", "created_at", "email_verified") VALUES ' + f"('{id}', 'eee9610b2cdc@fairhub.io', 'eee9610b2cdc', '{hashed}', '{created_at}', False)" + ) + + user_obj = f"SELECT * FROM user WHERE id = '{id}'" + if len(user_obj) < 1: + return "error", 403 + op.execute(f"UPDATE invite SET user_id ='{id}'") + + with op.batch_alter_table("invite") as batch_op: + batch_op.alter_column("permission", nullable=True) + batch_op.alter_column("user_id", nullable=False) + op.create_foreign_key( + "fk_user_id", + "invite", + "user", + ["user_id"], + ["id"], + ) + + with op.batch_alter_table("notification") as batch_op: + batch_op.alter_column("title", nullable=False) + batch_op.alter_column("message", nullable=False) + batch_op.alter_column("type", nullable=False) + batch_op.alter_column("target", nullable=False) + batch_op.alter_column("read", nullable=False) diff --git a/alembic/versions_backup/f150341d2741_modify_email_verification.py b/alembic/versions_backup/f150341d2741_modify_email_verification.py new file mode 100644 index 00000000..ce272600 --- /dev/null +++ b/alembic/versions_backup/f150341d2741_modify_email_verification.py @@ -0,0 +1,35 @@ +"""modify_email_verification + +Revision ID: f150341d2741 +Revises: b20e07d8924f +Create Date: 2023-12-13 20:43:24.637259 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +import datetime +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "f150341d2741" +down_revision: Union[str, None] = "b20e07d8924f" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + +created_at = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) + + +def upgrade() -> None: + op.alter_column("email_verification", "token", type_=sa.String) + op.alter_column("email_verification", "user_id", type_=sa.CHAR(36)) + + op.drop_column("email_verification", "created_at") + + op.add_column( + "email_verification", sa.Column("created_at", sa.BIGINT(), nullable=True) + ) + + op.execute(f"UPDATE \"email_verification\" SET created_at ='{created_at}'") + + op.alter_column("email_verification", "created_at", nullable=False) diff --git a/alembic/versions_backup/f189827ee101_user_table_email_verfication.py b/alembic/versions_backup/f189827ee101_user_table_email_verfication.py new file mode 100644 index 00000000..28c1634e --- /dev/null +++ b/alembic/versions_backup/f189827ee101_user_table_email_verfication.py @@ -0,0 +1,36 @@ +"""user_table_email_verfication + +Revision ID: f189827ee101 +Revises: fed13d793eff +Create Date: 2023-12-11 14:54:31.303523 + +""" +from typing import Sequence, Union +import sqlalchemy as sa +import datetime +import random +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "f189827ee101" +down_revision: Union[str, None] = "fed13d793eff" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None +token_generated = int(datetime.datetime.now(datetime.timezone.utc).timestamp()) +token = random.randint(10 ** (7 - 1), (10**7) - 1) + + +def upgrade(): + op.add_column("user", sa.Column("token_generated", sa.BIGINT, nullable=True)) + op.add_column("user", sa.Column("token", sa.String, nullable=True)) + op.execute(f"UPDATE \"user\" SET token_generated ='{token_generated}'") + op.execute(f"UPDATE \"user\" SET token ='{token}'") + op.execute("UPDATE invite SET info ='info'") + op.execute(f'UPDATE "user" SET email_verified = FALSE') + + with op.batch_alter_table("user") as batch_op: + batch_op.alter_column("token", nullable=False) + batch_op.alter_column("token_generated", nullable=False) + batch_op.alter_column("email_verified", nullable=False) + with op.batch_alter_table("invite") as batch_op: + batch_op.alter_column("info", nullable=False) diff --git a/alembic/versions_backup/fed13d793eff_email_verified_type.py b/alembic/versions_backup/fed13d793eff_email_verified_type.py new file mode 100644 index 00000000..fbc32a58 --- /dev/null +++ b/alembic/versions_backup/fed13d793eff_email_verified_type.py @@ -0,0 +1,27 @@ +"""email_verified_type + +Revision ID: fed13d793eff +Revises: eee9610b2cdc +Create Date: 2023-12-05 16:03:51.166254 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = "fed13d793eff" +down_revision: Union[str, None] = "eee9610b2cdc" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + with op.batch_alter_table("user") as batch_op: + batch_op.alter_column( + "email_verified", + type_=sa.Boolean(), + postgresql_using="email_verified::boolean", + ) diff --git a/apis/__init__.py b/apis/__init__.py index c58a7559..caefd02b 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -1,7 +1,6 @@ """Initialize the api system for the backend""" from flask_restx import Api, Resource - from apis.dataset_metadata_namespace import api as dataset_metadata_namespace from apis.study_metadata_namespace import api as study_metadata_namespace @@ -98,6 +97,7 @@ "redcap", "dashboard", "utils", + # "invite_general_users", ] @@ -114,7 +114,6 @@ class HelloEverynyan(Resource): @api.response(400, "Validation Error") def get(self): """Returns a simple 'Server Active' message""" - return "Server active!" 
diff --git a/apis/authentication.py b/apis/authentication.py index c6d11199..27202edc 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -17,6 +17,9 @@ from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate import model +from invitation.invitation import ( + send_email_verification, +) api = Namespace("Authentication", description="Authentication paths", path="/") @@ -148,17 +151,92 @@ def validate_password(instance): ).one_or_none() if user: return "This email address is already in use", 409 + new_user = model.User.from_data(data) + verification = model.EmailVerification(new_user) + new_user.email_verified = False + + if os.environ.get("FLASK_ENV") == "testing": + verification.token = 1234567 + + model.db.session.add(new_user) + model.db.session.add(verification) + if os.environ.get("FLASK_ENV") != "testing": + if new_user.email_address in bypassed_emails: + new_user.email_verified = True + + model.db.session.commit() + + if g.gb.is_on("email-verification"): + if os.environ.get("FLASK_ENV") != "testing": + if new_user.email_address not in bypassed_emails: + send_email_verification(new_user.email_address, verification.token) + return f"Hi, {new_user.email_address}, you have successfully signed up", 201 + + +@api.route("/auth/email-verification/confirm") +class EmailVerification(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(contributors_model) + def post(self): + data: Union[Any, dict] = request.json + if "token" not in data or "email" not in data: + return "email or token are required", 422 + user = model.User.query.filter_by(email_address=data["email"]).one_or_none() + if not user: + return "user not found", 404 + if user.email_verified: + return "user already verified", 422 + if os.environ.get("FLASK_ENV") != "testing": + if not user.verify_token(data["token"]): + return "Token invalid or expired", 422 + user.email_verified = 
True + invitations = model.StudyInvitedContributor.query.filter_by( - email_address=data["email_address"] + email_address=data["email"] ).all() - - new_user = model.User.from_data(data) for invite in invitations: - invite.study.add_user_to_study(new_user, invite.permission) + invite.study.add_user_to_study(user, invite.permission) model.db.session.delete(invite) - model.db.session.add(new_user) model.db.session.commit() - return f"Hi, {new_user.email_address}, you have successfully signed up", 201 + return "Email verified", 201 + + +@api.route("/auth/email-verification/resend") +class GenerateVerification(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(contributors_model) + def post(self): + data: Union[Any, dict] = request.json + user = model.User.query.filter_by(email_address=data["email"]).one_or_none() + if not user: + return "user not found", 404 + if user.email_verified: + return "user already verified", 422 + + # user.email_verified = True + token = user.generate_token() + + if g.gb.is_on("email-verification"): + if os.environ.get("FLASK_ENV") != "testing": + send_email_verification(user.email_address, token) + + model.db.session.commit() + return "Your email is verified", 201 + + +@api.route("/auth/email-verification/check") +class GenerateVerificationCheck(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(contributors_model) + def post(self): + data: Union[Any, dict] = request.json + user = model.User.query.filter_by(email_address=data["email"]).one_or_none() + if not user: + return {"message": "User not found"}, 404 + return {"isVerified": user.email_verified}, 200 @api.route("/auth/login") @@ -173,7 +251,6 @@ def post(self): """logs in user and handles few authentication errors. 
Also, it sets token for logged user along with expiration date""" data: Union[Any, dict] = request.json - email_address = data["email_address"] def validate_is_valid_email(instance): @@ -207,10 +284,19 @@ def validate_is_valid_email(instance): validate(instance=data, schema=schema, format_checker=format_checker) except ValidationError as e: return e.message, 400 - user = model.User.query.filter_by(email_address=email_address).one_or_none() if not user: return "Invalid credentials", 401 + if os.environ.get("FLASK_ENV") != "testing": + bypassed_emails = [ + "test@fairhub.io", + "bpatel@fairhub.io", + "sanjay@fairhub.io", + "aydan@fairhub.io", + "cordier@ohsu.edu", + ] + if email_address in bypassed_emails: + user.email_verified = True validate_pass = user.check_password(data["password"]) @@ -249,11 +335,38 @@ def validate_is_valid_email(instance): ) resp = make_response(user.to_dict()) + if not user.email_verified: + return resp resp.set_cookie( "token", encoded_jwt_code, secure=True, httponly=True, samesite="None" ) + # if g.gb.is_on("email-verification"): + # if os.environ.get("FLASK_ENV") != "testing": + # if not check_trusted_device(): + # title = "you logged in" + # device_ip = request.remote_addr + # notification_type = "info" + # target = "" + # read = False + # send_notification = model.Notification.from_data( + # user, + # { + # "title": title, + # "message": device_ip, + # "type": notification_type, + # "target": target, + # "read": read, + # }, + # ) + # model.db.session.add(send_notification) + # model.db.session.commit() + # signin_notification(user, device_ip) + # add_user_to_device_list(resp, user) + # resp.status_code = 200 + g.token = jti added_session = model.Session.from_data(jti, expired_in.timestamp(), user) + model.db.session.add(added_session) model.db.session.commit() return resp @@ -325,7 +438,10 @@ def authorization(): if bool(re.search(route_pattern, request.path)): return if g.user: - return + if os.environ.get("FLASK_ENV") == "testing": + 
return + if g.user.email_verified: + return raise UnauthenticatedException("Access denied", 403) @@ -336,6 +452,9 @@ def is_granted(permission: str, study=None): ).first() if not contributor: return False + if os.environ.get("FLASK_ENV") != "testing": + if not g.user.email_verified: + return False role = { "owner": [ "owner", diff --git a/apis/contributor.py b/apis/contributor.py index 9f282312..d1850bca 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,10 +1,12 @@ from collections import OrderedDict from typing import Any, Dict, List, Union +import os from flask import Response, g, request from flask_restx import Namespace, Resource, fields import model +from invitation.invitation import send_access_contributors, send_invitation_study from .authentication import is_granted @@ -20,16 +22,14 @@ @api.route("/study//contributor") -class AddContributor(Resource): +class AllContributors(Resource): @api.doc("contributor list") @api.response(200, "Success") @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def get(self, study_id: int): contributors = model.StudyContributor.query.filter_by(study_id=study_id).all() - invited_contributors = model.StudyInvitedContributor.query.filter_by( - study_id=study_id - ).all() + invited_contributors = model.StudyInvitedContributor.query.filter_by(study_id=study_id).all() contributors_list = [c.to_dict() for c in contributors] + [ c.to_dict() for c in invited_contributors @@ -40,6 +40,7 @@ def get(self, study_id: int): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): + study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify study", 403 @@ -49,7 +50,9 @@ def post(self, study_id: int): user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] contributor_ = None - + study_name = study_obj.title + first_name = 
user.user_details.first_name if user else "" + last_name = user.user_details.last_name if user else "" try: if user: contributor_ = study_obj.add_user_to_study(user, permission) @@ -59,6 +62,23 @@ def post(self, study_id: int): except model.StudyException as ex: return ex.args[0], 409 model.db.session.commit() + if os.environ.get("FLASK_ENV") != "testing": + if g.gb.is_on("email-verification"): + if user: + send_access_contributors( + email_address, + study_obj, + first_name, + last_name, + contributor_.permission, + ) + else: + send_invitation_study( + email_address, + contributor_.token, + study_name, + contributor_.permission, + ) return contributor_.to_dict(), 201 @@ -100,7 +120,6 @@ def put(self, study_id: int, user_id: int): if not can_grant: return f"User cannot grant {permission}", 403 - # TODO: Owners downgrading themselves if user != g.user: grantee_level = list(grants.keys()).index(grantee.permission) # 1 new_level: int = list(grants.keys()).index(str(permission)) # 2 diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py index 404558fc..9f835def 100644 --- a/apis/dataset_metadata/dataset_rights.py +++ b/apis/dataset_metadata/dataset_rights.py @@ -37,7 +37,6 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume """Get dataset rights""" dataset_ = model.Dataset.query.get(dataset_id) dataset_rights_ = dataset_.dataset_rights - print([d.to_dict() for d in dataset_rights_]) return [d.to_dict() for d in dataset_rights_], 200 @api.doc("update rights") diff --git a/app.py b/app.py index 22d47b48..9d2db72a 100644 --- a/app.py +++ b/app.py @@ -8,9 +8,10 @@ import click import jwt -from flask import Flask, g, request +from flask import Flask, request, g from flask_bcrypt import Bcrypt from flask_cors import CORS +from flask_mailman import Mail from growthbook import GrowthBook from sqlalchemy import MetaData, inspect, text from sqlalchemy.ext.compiler import compiles @@ -21,12 +22,17 @@ import 
config import model from apis import api -from apis.authentication import UnauthenticatedException, authentication, authorization +from apis.authentication import ( + UnauthenticatedException, + authentication, + authorization, +) from apis.exception import ValidationException # from pyfairdatatools import __version__ bcrypt = Bcrypt() +mail = Mail() # Add Cascade to Table Drop Call in destroy-schema CLI command @@ -76,6 +82,7 @@ def create_app(config_module=None, loglevel="INFO"): bcrypt.init_app(app) caching.cache.init_app(app) + mail.init_app(app) cors_origins = [ "https://witty-mushroom-.*-.*.centralus.4.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string "https://brave-ground-.*-.*.centralus.2.azurestaticapps.net", # noqa E501 # pylint: disable=line-too-long # pylint: disable=anomalous-backslash-in-string diff --git a/config.py b/config.py index a198d1c4..22e48fdf 100644 --- a/config.py +++ b/config.py @@ -24,3 +24,6 @@ def get_env(key): FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_CONTAINER") FAIRHUB_GROWTHBOOK_CLIENT_KEY = get_env("FAIRHUB_GROWTHBOOK_CLIENT_KEY") +FAIRHUB_SMTP_CONNECTION_STRING = get_env("FAIRHUB_SMTP_CONNECTION_STRING") +FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS = get_env("FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS") +FAIRHUB_FRONTEND_URL = get_env("FAIRHUB_FRONTEND_URL") diff --git a/db.py b/db.py new file mode 100644 index 00000000..f0b13d6f --- /dev/null +++ b/db.py @@ -0,0 +1,3 @@ +from flask_sqlalchemy import SQLAlchemy + +db = SQLAlchemy() diff --git a/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py b/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py index 5fac771b..900bd0b9 100644 --- a/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py +++ 
b/dev/alembic.old/versions/72ac2b020c7c_delete_dataset_readme_table.py @@ -9,7 +9,6 @@ from typing import Sequence, Union import alembic -import sqlalchemy as sa # revision identifiers, used by Alembic. diff --git a/invitation/invitation.py b/invitation/invitation.py new file mode 100644 index 00000000..6583a057 --- /dev/null +++ b/invitation/invitation.py @@ -0,0 +1,202 @@ +from flask import render_template +import config +from azure.communication.email import EmailClient + + +def azure_email_connection(html_content, subject: str, to: str): + connection_string = config.FAIRHUB_SMTP_CONNECTION_STRING + email_client = EmailClient.from_connection_string(connection_string) + message = { + "content": { + "subject": subject, + "html": html_content + }, + "recipients": { + "to": [ + { + "address": to, + "displayName": "Customer Name" + } + ] + }, + "senderAddress": config.FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS + } + + email_client.begin_send(message) + # poller = email_client.begin_send(message) + # result = poller.result() + + +def send_invitation_study(to, token, study_name, role): + accept_url = f"{config.FAIRHUB_FRONTEND_URL}auth/signup?code={token}&email={to}" + html_content = render_template( + "accept_study_invitation.html", + token=token, + accept_url=accept_url, + study_name=study_name, + role=role, + to=to, + ) + subject, from_email, to = ( + f"You have been invited to {study_name} invitation", + "aydan.gasimova2@example.com", + to, + ) + + # msg = EmailMessage(subject, html_content, from_email, [to]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +def send_access_contributors(to, study, first_name, last_name, role): + accept_url = f"{config.FAIRHUB_FRONTEND_URL}study/{study.id}/overview" + html_content = render_template( + "invite_contributors.html", + accept_url=accept_url, + first_name=first_name, + last_name=last_name, + study_name=study.title, + study_id=study.id, + role=role, + ) + subject, from_email, 
to = ( + f"You have been invited to {study.title} invitation", + "aydan.gasimova2@example.com", + to, + ) + # msg = EmailMessage(subject, html_content, from_email, [to]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +# def send_invitation_general(to, token): +# accept_url = f"{config.FAIRHUB_FRONTEND_URL}auth/signup?code={token}&email={to}" +# html_content = render_template( +# "accept_general_invitation.html", token=token, accept_url=accept_url, to=to +# ) +# subject, from_email, to = ( +# f"You have been invited to signup to FAIRhub", +# "aydan.gasimova2@example.com", +# to, +# ) +# +# # msg = EmailMessage(subject, html_content, from_email, [to]) +# # msg.content_subtype = "html" +# # msg.send() +# azure_email_connection(html_content, subject) +# + +def send_email_verification(email_address, token): + verification_url = ( + f"{config.FAIRHUB_FRONTEND_URL}auth/verify-email?email={email_address}&token={token}" + ) + subject, from_email, to = ( + f"Verify email address", + "aydan.gasimova@example.com", + email_address, + ) + html_content = render_template( + "email_verification.html", + token=token, + verification_url=verification_url, + email=email_address, + ) + # msg = EmailMessage(subject, html_content, from_email, [email_address]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +def signin_notification(user, device_ip): + user_profile_url = f"{config.FAIRHUB_FRONTEND_URL}studies" + subject, from_email, to = ( + f"Login notification", + "aydan.gasimova2@example.com", + user.email_address, + ) + html_content = render_template( + "device_notification.html", + user_profile_url=user_profile_url, + device_ip=device_ip, + ) + # msg = EmailMessage(subject, html_content, from_email, [user.email_address]) + # msg.content_subtype = "html" + # msg.send() + azure_email_connection(html_content, subject, to) + + +# def get_config(): +# if 
os.environ.get("FLASK_ENV") == "testing": +# config_module_name = "pytest_config" +# else: +# config_module_name = "config" +# +# config_module = importlib.import_module(config_module_name) +# +# if os.environ.get("FLASK_ENV") == "testing": +# # If testing, use the 'TestConfig' class for accessing 'secret' +# config = config_module.TestConfig +# else: +# config = config_module +# +# return config + + +# Get list of user ids that have previously authenticated on this device +# def get_device_user_list() -> list[str]: +# # FIX THE TYPE OF THE TOKEN["USERS"], IT WAS GETTING ERROR SINCE token returns a dict instead of list +# # Check if cookie exists +# if "token_device" not in request.cookies: +# return [] +# +# # Get value from cookie +# cookie = request.cookies.get("token_device") +# if not cookie: +# return [] +# +# token = {} +# config = get_config() +# try: +# token = jwt.decode(cookie, config.FAIRHUB_SECRET, algorithms=["HS256"]) +# except jwt.ExpiredSignatureError: +# return [] +# +# if "users" not in token: +# return [] +# +# return token["users"] +# +# +# def add_user_to_device_list(response: Response, user) -> None: +# users = get_device_user_list() +# if user.id not in users: +# users.append(user.id) +# +# config = get_config() +# expiration = datetime.datetime.now(timezone.utc) + datetime.timedelta(days=365) +# cookie = jwt.encode( +# { +# "users": users, +# "exp": expiration, +# }, +# config.FAIRHUB_SECRET, +# algorithm="HS256", +# ) +# +# response.set_cookie( +# "token_device", +# cookie, +# secure=True, +# httponly=True, +# samesite="None", +# expires=expiration, +# ) + +# +# def check_trusted_device() -> bool: +# users = get_device_user_list() +# for user in users: +# print("User known: " + user) +# return g.user.id in users diff --git a/model/email_verification.py b/model/email_verification.py index 8961b8f2..43a582de 100644 --- a/model/email_verification.py +++ b/model/email_verification.py @@ -1,17 +1,22 @@ import datetime +import random +import 
uuid from datetime import timezone from .db import db class EmailVerification(db.Model): # type: ignore - def __init__(self): + def __init__(self, user): self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.id = str(uuid.uuid4()) + self.token = str(random.randint(10 ** (7 - 1), (10**7) - 1)) + self.user = user __tablename__ = "email_verification" id = db.Column(db.CHAR(36), primary_key=True) - token = db.Column(db.CHAR(36), nullable=False) - created_at = db.Column(db.CHAR(36), nullable=False) + token = db.Column(db.String, nullable=False) + created_at = db.Column(db.BigInteger, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) user = db.relationship("User", back_populates="email_verification") diff --git a/model/notification.py b/model/notification.py index b123c188..72fdfae4 100644 --- a/model/notification.py +++ b/model/notification.py @@ -6,17 +6,18 @@ class Notification(db.Model): # type: ignore - def __init__(self): + def __init__(self, user): self.id = str(uuid.uuid4()) self.created_at = datetime.datetime.now(timezone.utc).timestamp() + self.user = user __tablename__ = "notification" id = db.Column(db.CHAR(36), primary_key=True) - title = db.Column(db.String, nullable=True) - message = db.Column(db.String, nullable=True) - type = db.Column(db.String, nullable=True) + title = db.Column(db.String, nullable=False) + message = db.Column(db.String, nullable=False) + type = db.Column(db.String, nullable=False) target = db.Column(db.String, nullable=True) - read = db.Column(db.BOOLEAN, nullable=True) + read = db.Column(db.BOOLEAN, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) user_id = db.Column(db.CHAR(36), db.ForeignKey("user.id"), nullable=False) @@ -34,10 +35,10 @@ def to_dict(self): } @staticmethod - def from_data(data: dict): - user = Notification() - user.update(data) - return user + def from_data(user, data: dict): + notification = Notification(user) + 
notification.update(data) + return notification def update(self, data: dict): self.title = data["title"] diff --git a/model/study.py b/model/study.py index fa7a05e4..4846685d 100644 --- a/model/study.py +++ b/model/study.py @@ -179,7 +179,9 @@ def to_dict(self): self.study_description.brief_summary if self.study_description else None ), "owner": owner.to_dict()["id"] if owner else None, - "role": contributor_permission.to_dict()["role"], + "role": contributor_permission.to_dict()["role"] + if contributor_permission + else None, } def to_dict_study_metadata(self): diff --git a/model/user.py b/model/user.py index c27a1419..8215fb7f 100644 --- a/model/user.py +++ b/model/user.py @@ -15,29 +15,59 @@ def __init__(self, password): self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() self.set_password(password) self.user_details = model.UserDetails(self) + self.email_verified = False + db.Column(db.BigInteger, nullable=False) __tablename__ = "user" id = db.Column(db.CHAR(36), primary_key=True) email_address = db.Column(db.String, nullable=False, unique=True) username = db.Column(db.String, nullable=False, unique=True) hash = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) - email_verified = db.Column(db.String, nullable=True) + email_verified = db.Column(db.BOOLEAN, nullable=True) study_contributors = db.relationship("StudyContributor", back_populates="user") email_verification = db.relationship("EmailVerification", back_populates="user") user_details = db.relationship("UserDetails", uselist=False, back_populates="user") token_blacklist = db.relationship("TokenBlacklist", back_populates="user") notification = db.relationship("Notification", back_populates="user") + study_contributors = db.relationship( + "StudyContributor", + back_populates="user", + cascade="all, delete", + ) + email_verification = db.relationship( + "EmailVerification", + back_populates="user", + cascade="all, delete", + ) + user_details = 
db.relationship( + "UserDetails", + uselist=False, + back_populates="user", + cascade="all, delete", + ) + token_blacklist = db.relationship( + "TokenBlacklist", + back_populates="user", + cascade="all, delete", + ) + notification = db.relationship( + "Notification", + back_populates="user", + cascade="all, delete", + ) session = db.relationship("Session", back_populates="user") def to_dict(self): + # latest_object = max(self.email_verification, key=lambda x: x.created_at) if self.email_verification else None return { "id": self.id, "email_address": self.email_address, "username": self.username, "first_name": self.user_details.first_name if self.user_details else None, "last_name": self.user_details.last_name if self.user_details else None, + "email_verified": self.email_verified, } @staticmethod @@ -67,3 +97,33 @@ def check_password(self, password: str): app.bcrypt.generate_password_hash(password).decode("utf-8") is_valid = app.bcrypt.check_password_hash(self.hash, password) return is_valid + + def verify_token(self, token: str) -> bool: + latest_object = ( + max(self.email_verification, key=lambda x: x.created_at) + if self.email_verification + else None + ) + if not latest_object or token != latest_object.token: + return False + current_time = datetime.datetime.now() + datetime_obj = datetime.datetime.fromtimestamp(latest_object.created_at) + formatted_time = datetime_obj.strftime("%Y-%m-%d %H:%M:%S.%f") + created_time = datetime.datetime.strptime( + formatted_time, "%Y-%m-%d %H:%M:%S.%f" + ) + return current_time - created_time < datetime.timedelta(minutes=15) + + def generate_token(self) -> str: + email_verification = model.EmailVerification(self) + db.session.add(email_verification) + db.session.commit() + return email_verification.token + + def change_email(self, email: str): + if email == self.email_address: + return + + self.email_verified = False + self.email_address = email + self.generate_token() diff --git a/poetry.lock b/poetry.lock index 
fb4fa708..04113f55 100644 --- a/poetry.lock +++ b/poetry.lock @@ -247,6 +247,35 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "azure-common" +version = "1.1.28" +description = "Microsoft Azure Client Library for Python (Common)" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, + {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, +] + +[[package]] +name = "azure-communication-email" +version = "1.0.0" +description = "Microsoft Azure MyService Management Client Library for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "azure-communication-email-1.0.0.zip", hash = "sha256:5df96b8b4389696244982ffc3740722f1948abb289f19af00ce2e1c534450095"}, + {file = "azure_communication_email-1.0.0-py3-none-any.whl", hash = "sha256:b580ccfc9f1372d0b65f235334e569f3909894316bc3203bd9deb5760612693a"}, +] + +[package.dependencies] +azure-common = ">=1.1,<2.0" +azure-mgmt-core = ">=1.3.2,<2.0.0" +msrest = ">=0.7.1" + [[package]] name = "azure-core" version = "1.32.0" @@ -267,6 +296,21 @@ typing-extensions = ">=4.6.0" [package.extras] aio = ["aiohttp (>=3.0)"] +[[package]] +name = "azure-mgmt-core" +version = "1.5.0" +description = "Microsoft Azure Management Core Library for Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_mgmt_core-1.5.0-py3-none-any.whl", hash = "sha256:18aaa5a723ee8ae05bf1bfc9f6d0ffb996631c7ea3c922cc86f522973ce07b5f"}, + {file = "azure_mgmt_core-1.5.0.tar.gz", hash = 
"sha256:380ae3dfa3639f4a5c246a7db7ed2d08374e88230fd0da3eb899f7c11e5c441a"}, +] + +[package.dependencies] +azure-core = ">=1.31.0" + [[package]] name = "azure-storage-blob" version = "12.24.1" @@ -696,74 +740,75 @@ test = ["pytest"] [[package]] name = "coverage" -version = "7.6.10" +version = "7.6.12" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, - {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, - {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, - {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = 
"sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, - {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, - {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, - {file = 
"coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, - {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, - {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, - {file = 
"coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, - {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, - {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, - {file = 
"coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, - {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, - {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, - {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = 
"sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, - {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, - {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, - {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = 
"sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = 
"coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = 
"coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = 
"coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = 
"sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.dependencies] @@ -1129,6 +1174,26 @@ files = [ blinker = "*" Flask = "*" +[[package]] +name = "flask-mailman" +version = "1.1.1" +description = "Porting Django's email implementation to your Flask applications." +category = "main" +optional = false +python-versions = "<4.0,>=3.7" +files = [ + {file = "flask_mailman-1.1.1-py3-none-any.whl", hash = "sha256:0a66ead606b2ec9e4371d727f82709c7a51270bc5306be57c9f4ce0ed29dbe57"}, + {file = "flask_mailman-1.1.1.tar.gz", hash = "sha256:3bc1ffffbd655ba9e468946a5f02e9cc772594fe1e98ace636c2f6717419eefa"}, +] + +[package.dependencies] +flask = ">=1.0" + +[package.extras] +dev = ["bump2version", "pip", "pre-commit", "toml", "tox", "twine", "virtualenv"] +doc = ["mkdocs", "mkdocs-autorefs", "mkdocs-include-markdown-plugin", "mkdocs-material", "mkdocs-material-extensions", "mkdocstrings"] +test = ["aiosmtpd (>=1.4.4.post2,<2.0.0)", "black", "flake8", "isort", "pytest", "pytest-cov"] + [[package]] name = "flask-restx" version = "1.3.0" @@ -2187,14 +2252,14 @@ files = [ [[package]] name = "mistune" -version = "3.1.1" +version = "3.1.2" description = "A sane and fast Markdown parser with useful plugins and renderers" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "mistune-3.1.1-py3-none-any.whl", hash = "sha256:02106ac2aa4f66e769debbfa028509a275069dcffce0dfa578edd7b991ee700a"}, - {file = 
"mistune-3.1.1.tar.gz", hash = "sha256:e0740d635f515119f7d1feb6f9b192ee60f0cc649f80a8f944f905706a21654c"}, + {file = "mistune-3.1.2-py3-none-any.whl", hash = "sha256:4b47731332315cdca99e0ded46fc0004001c1299ff773dfb48fbe1fd226de319"}, + {file = "mistune-3.1.2.tar.gz", hash = "sha256:733bf018ba007e8b5f2d3a9eb624034f6ee26c4ea769a98ec533ee111d504dff"}, ] [package.dependencies] @@ -2227,52 +2292,68 @@ watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] +[[package]] +name = "msrest" +version = "0.7.1" +description = "AutoRest swagger generator Python client runtime." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32"}, + {file = "msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"}, +] + +[package.dependencies] +azure-core = ">=1.24.0" +certifi = ">=2017.4.17" +isodate = ">=0.6.0" +requests = ">=2.16,<3.0" +requests-oauthlib = ">=0.5.0" + +[package.extras] +async = ["aiodns", "aiohttp (>=3.0)"] + [[package]] name = "mypy" -version = "1.14.1" +version = "1.15.0" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, - {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, - {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, - {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, - {file 
= "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, - {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, - {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, - {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, - {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, - {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, - {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, - {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, - {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, - {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, - {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, - {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, - {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, - {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, - {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, - {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, - {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, - {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, - {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, - {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, - {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, - {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, - {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, - {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, - {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, - {file = 
"mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, - {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, - {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, - {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, - {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, - {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, - {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, - {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, - {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = 
"mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = 
"mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] @@ -2481,6 +2562,23 @@ files = [ {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + [[package]] name = "overrides" version = "7.7.0" @@ -2765,33 +2863,26 @@ wcwidth = "*" [[package]] name = "psutil" -version = "6.1.1" -description = "Cross-platform lib for process and system monitoring in Python." +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.6" files = [ - {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, - {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, - {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"}, - {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"}, - {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"}, - {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"}, - {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"}, - {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"}, - {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"}, - {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"}, - {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"}, - {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"}, - {file = 
"psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"}, - {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"}, - {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"}, - {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"}, - {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = 
"sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, ] [package.extras] -dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] [[package]] @@ -3488,6 +3579,25 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." 
+category = "main" +optional = false +python-versions = ">=3.4" +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + [[package]] name = "rfc3339-validator" version = "0.1.4" @@ -3732,69 +3842,69 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.37" +version = "2.0.38" description = "Database Abstraction Library" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da36c3b0e891808a7542c5c89f224520b9a16c7f5e4d6a1156955605e54aef0e"}, - {file = "SQLAlchemy-2.0.37-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e7402ff96e2b073a98ef6d6142796426d705addd27b9d26c3b32dbaa06d7d069"}, - {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6f5d254a22394847245f411a2956976401e84da4288aa70cbcd5190744062c1"}, - {file = "SQLAlchemy-2.0.37-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41296bbcaa55ef5fdd32389a35c710133b097f7b2609d8218c0eabded43a1d84"}, - {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bedee60385c1c0411378cbd4dc486362f5ee88deceea50002772912d798bb00f"}, - {file = "SQLAlchemy-2.0.37-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6c67415258f9f3c69867ec02fea1bf6508153709ecbd731a982442a590f2b7e4"}, - {file = "SQLAlchemy-2.0.37-cp310-cp310-win32.whl", hash = "sha256:650dcb70739957a492ad8acff65d099a9586b9b8920e3507ca61ec3ce650bb72"}, - {file = "SQLAlchemy-2.0.37-cp310-cp310-win_amd64.whl", hash = "sha256:93d1543cd8359040c02b6614421c8e10cd7a788c40047dbc507ed46c29ae5636"}, - {file = 
"SQLAlchemy-2.0.37-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78361be6dc9073ed17ab380985d1e45e48a642313ab68ab6afa2457354ff692c"}, - {file = "SQLAlchemy-2.0.37-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b661b49d0cb0ab311a189b31e25576b7ac3e20783beb1e1817d72d9d02508bf5"}, - {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d57bafbab289e147d064ffbd5cca2d7b1394b63417c0636cea1f2e93d16eb9e8"}, - {file = "SQLAlchemy-2.0.37-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2c0913f02341d25fb858e4fb2031e6b0813494cca1ba07d417674128ce11b"}, - {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9df21b8d9e5c136ea6cde1c50d2b1c29a2b5ff2b1d610165c23ff250e0704087"}, - {file = "SQLAlchemy-2.0.37-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db18ff6b8c0f1917f8b20f8eca35c28bbccb9f83afa94743e03d40203ed83de9"}, - {file = "SQLAlchemy-2.0.37-cp311-cp311-win32.whl", hash = "sha256:46954173612617a99a64aee103bcd3f078901b9a8dcfc6ae80cbf34ba23df989"}, - {file = "SQLAlchemy-2.0.37-cp311-cp311-win_amd64.whl", hash = "sha256:7b7e772dc4bc507fdec4ee20182f15bd60d2a84f1e087a8accf5b5b7a0dcf2ba"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2952748ecd67ed3b56773c185e85fc084f6bdcdec10e5032a7c25a6bc7d682ef"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3151822aa1db0eb5afd65ccfafebe0ef5cda3a7701a279c8d0bf17781a793bb4"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaa8039b6d20137a4e02603aba37d12cd2dde7887500b8855356682fc33933f4"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cdba1f73b64530c47b27118b7053b8447e6d6f3c8104e3ac59f3d40c33aa9fd"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:1b2690456528a87234a75d1a1644cdb330a6926f455403c8e4f6cad6921f9098"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf5ae8a9dcf657fd72144a7fd01f243236ea39e7344e579a121c4205aedf07bb"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-win32.whl", hash = "sha256:ea308cec940905ba008291d93619d92edaf83232ec85fbd514dcb329f3192761"}, - {file = "SQLAlchemy-2.0.37-cp312-cp312-win_amd64.whl", hash = "sha256:635d8a21577341dfe4f7fa59ec394b346da12420b86624a69e466d446de16aff"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c4096727193762e72ce9437e2a86a110cf081241919ce3fab8e89c02f6b6658"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e4fb5ac86d8fe8151966814f6720996430462e633d225497566b3996966b9bdb"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e56a139bfe136a22c438478a86f8204c1eb5eed36f4e15c4224e4b9db01cb3e4"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f95fc8e3f34b5f6b3effb49d10ac97c569ec8e32f985612d9b25dd12d0d2e94"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c505edd429abdfe3643fa3b2e83efb3445a34a9dc49d5f692dd087be966020e0"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:12b0f1ec623cccf058cf21cb544f0e74656618165b083d78145cafde156ea7b6"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-win32.whl", hash = "sha256:293f9ade06b2e68dd03cfb14d49202fac47b7bb94bffcff174568c951fbc7af2"}, - {file = "SQLAlchemy-2.0.37-cp313-cp313-win_amd64.whl", hash = "sha256:d70f53a0646cc418ca4853da57cf3ddddbccb8c98406791f24426f2dd77fd0e2"}, - {file = "SQLAlchemy-2.0.37-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44f569d0b1eb82301b92b72085583277316e7367e038d97c3a1a899d9a05e342"}, - {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b2eae3423e538c10d93ae3e87788c6a84658c3ed6db62e6a61bb9495b0ad16bb"}, - {file = "SQLAlchemy-2.0.37-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfff7be361048244c3aa0f60b5e63221c5e0f0e509f4e47b8910e22b57d10ae7"}, - {file = "SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:5bc3339db84c5fb9130ac0e2f20347ee77b5dd2596ba327ce0d399752f4fce39"}, - {file = "SQLAlchemy-2.0.37-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:84b9f23b0fa98a6a4b99d73989350a94e4a4ec476b9a7dfe9b79ba5939f5e80b"}, - {file = "SQLAlchemy-2.0.37-cp37-cp37m-win32.whl", hash = "sha256:51bc9cfef83e0ac84f86bf2b10eaccb27c5a3e66a1212bef676f5bee6ef33ebb"}, - {file = "SQLAlchemy-2.0.37-cp37-cp37m-win_amd64.whl", hash = "sha256:8e47f1af09444f87c67b4f1bb6231e12ba6d4d9f03050d7fc88df6d075231a49"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6b788f14c5bb91db7f468dcf76f8b64423660a05e57fe277d3f4fad7b9dcb7ce"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521ef85c04c33009166777c77e76c8a676e2d8528dc83a57836b63ca9c69dcd1"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75311559f5c9881a9808eadbeb20ed8d8ba3f7225bef3afed2000c2a9f4d49b9"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cce918ada64c956b62ca2c2af59b125767097ec1dca89650a6221e887521bfd7"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9d087663b7e1feabea8c578d6887d59bb00388158e8bff3a76be11aa3f748ca2"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cf95a60b36997dad99692314c4713f141b61c5b0b4cc5c3426faad570b31ca01"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-win32.whl", hash = "sha256:d75ead7dd4d255068ea0f21492ee67937bd7c90964c8f3c2bea83c7b7f81b95f"}, - {file = "SQLAlchemy-2.0.37-cp38-cp38-win_amd64.whl", hash = 
"sha256:74bbd1d0a9bacf34266a7907d43260c8d65d31d691bb2356f41b17c2dca5b1d0"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:648ec5acf95ad59255452ef759054f2176849662af4521db6cb245263ae4aa33"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:35bd2df269de082065d4b23ae08502a47255832cc3f17619a5cea92ce478b02b"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f581d365af9373a738c49e0c51e8b18e08d8a6b1b15cc556773bcd8a192fa8b"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82df02816c14f8dc9f4d74aea4cb84a92f4b0620235daa76dde002409a3fbb5a"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94b564e38b344d3e67d2e224f0aec6ba09a77e4582ced41e7bfd0f757d926ec9"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:955a2a765aa1bd81aafa69ffda179d4fe3e2a3ad462a736ae5b6f387f78bfeb8"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-win32.whl", hash = "sha256:03f0528c53ca0b67094c4764523c1451ea15959bbf0a8a8a3096900014db0278"}, - {file = "SQLAlchemy-2.0.37-cp39-cp39-win_amd64.whl", hash = "sha256:4b12885dc85a2ab2b7d00995bac6d967bffa8594123b02ed21e8eb2205a7584b"}, - {file = "SQLAlchemy-2.0.37-py3-none-any.whl", hash = "sha256:a8998bf9f8658bd3839cbc44ddbe982955641863da0c1efe5b00c1ab4f5c16b1"}, - {file = "sqlalchemy-2.0.37.tar.gz", hash = "sha256:12b28d99a9c14eaf4055810df1001557176716de0167b91026e648e65229bffb"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:402c2316d95ed90d3d3c25ad0390afa52f4d2c56b348f212aa9c8d072a40eee5"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6493bc0eacdbb2c0f0d260d8988e943fee06089cd239bd7f3d0c45d1657a70e2"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0561832b04c6071bac3aad45b0d3bb6d2c4f46a8409f0a7a9c9fa6673b41bc03"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49aa2cdd1e88adb1617c672a09bf4ebf2f05c9448c6dbeba096a3aeeb9d4d443"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-win32.whl", hash = "sha256:64aa8934200e222f72fcfd82ee71c0130a9c07d5725af6fe6e919017d095b297"}, + {file = "SQLAlchemy-2.0.38-cp310-cp310-win_amd64.whl", hash = "sha256:c57b8e0841f3fce7b703530ed70c7c36269c6d180ea2e02e36b34cb7288c50c7"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf89e0e4a30714b357f5d46b6f20e0099d38b30d45fa68ea48589faf5f12f62d"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8455aa60da49cb112df62b4721bd8ad3654a3a02b9452c783e651637a1f21fa2"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f53c0d6a859b2db58332e0e6a921582a02c1677cc93d4cbb36fdf49709b327b2"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c4817dff8cef5697f5afe5fec6bc1783994d55a68391be24cb7d80d2dbc3a6"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9cea5b756173bb86e2235f2f871b406a9b9d722417ae31e5391ccaef5348f2c"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40e9cdbd18c1f84631312b64993f7d755d85a3930252f6276a77432a2b25a2f3"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-win32.whl", hash = "sha256:cb39ed598aaf102251483f3e4675c5dd6b289c8142210ef76ba24aae0a8f8aba"}, + {file = "SQLAlchemy-2.0.38-cp311-cp311-win_amd64.whl", hash = 
"sha256:f9d57f1b3061b3e21476b0ad5f0397b112b94ace21d1f439f2db472e568178ae"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12d5b06a1f3aeccf295a5843c86835033797fea292c60e72b07bcb5d820e6dd3"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e036549ad14f2b414c725349cce0772ea34a7ab008e9cd67f9084e4f371d1f32"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3bee874cb1fadee2ff2b79fc9fc808aa638670f28b2145074538d4a6a5028e"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e185ea07a99ce8b8edfc788c586c538c4b1351007e614ceb708fd01b095ef33e"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b79ee64d01d05a5476d5cceb3c27b5535e6bb84ee0f872ba60d9a8cd4d0e6579"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afd776cf1ebfc7f9aa42a09cf19feadb40a26366802d86c1fba080d8e5e74bdd"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-win32.whl", hash = "sha256:a5645cd45f56895cfe3ca3459aed9ff2d3f9aaa29ff7edf557fa7a23515a3725"}, + {file = "SQLAlchemy-2.0.38-cp312-cp312-win_amd64.whl", hash = "sha256:1052723e6cd95312f6a6eff9a279fd41bbae67633415373fdac3c430eca3425d"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ecef029b69843b82048c5b347d8e6049356aa24ed644006c9a9d7098c3bd3bfd"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c8bcad7fc12f0cc5896d8e10fdf703c45bd487294a986903fe032c72201596b"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0ef3f98175d77180ffdc623d38e9f1736e8d86b6ba70bff182a7e68bed7727"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ac78898c50e2574e9f938d2e5caa8fe187d7a5b69b65faa1ea4648925b096"}, + {file = 
"SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eb4fa13c8c7a2404b6a8e3772c17a55b1ba18bc711e25e4d6c0c9f5f541b02a"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, + {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, + {file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = "sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, + {file = 
"SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, + {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash = "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, + {file = "SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, + {file = 
"SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, + {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, + {file = "sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb"}, ] [package.dependencies] @@ -4374,4 +4484,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "cbed924469a5f0bbc91cec5bd6f054f02434a02bcc481059fffab9481583adc0" +content-hash = "6e0584b39354841ea870ef44b3b8c27ffde8cb292d8a1dad071340ab5625682b" diff --git a/pyproject.toml b/pyproject.toml index a1b9d064..0a38562f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,8 +36,10 @@ Flask-Cors = "^4.0.0" flask-restx = "^1.1.0" waitress = "^2.1.2" -# Email +# Email flask-mail = "^0.9.1" +flask-mailman = "^1.0.0" +azure-communication-email = "^1.0.0" # API Client requests = "^2.31.0" diff --git a/sql/init.sql b/sql/init.sql index 824179be..ce042e38 100644 --- a/sql/init.sql +++ b/sql/init.sql @@ -386,23 +386,23 @@ INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; --- Dumping structure for table public.invited_study_contributor -CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( +-- Dumping structure for table public.invite +CREATE TABLE IF NOT EXISTS "invite" ( "email_address" VARCHAR NOT NULL, "permission" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, "invited_on" BIGINT NOT NULL, PRIMARY KEY ("email_address", "study_id"), - CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "invite_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION 
ON DELETE NO ACTION ); --- Dumping data for table public.invited_study_contributor: 3 rows -/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; -INSERT INTO "invited_study_contributor" ("email_address", "permission", "study_id", "invited_on") VALUES +-- Dumping data for table public.invite: 3 rows +/*!40000 ALTER TABLE "invite" DISABLE KEYS */; +INSERT INTO "invite" ("email_address", "permission", "study_id", "invited_on") VALUES ('Aliya_Herman@yahoo.com', 'editor', '00000000-0000-0000-0000-000000000001', 1693805470), ('Anastacio50@hotmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), ('Edward0@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470); -/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; +/*!40000 ALTER TABLE "invite" ENABLE KEYS */; -- Dumping structure for table public.participant CREATE TABLE IF NOT EXISTS "participant" ( diff --git a/sql/init_timezones.sql b/sql/init_timezones.sql index bda50aa5..b38ac176 100644 --- a/sql/init_timezones.sql +++ b/sql/init_timezones.sql @@ -438,24 +438,24 @@ INSERT INTO "dataset_title" ("id", "title", "type", "dataset_id") VALUES ('02937b58-268d-486d-ad63-55a79b39ea9c', 'title', 'na', '00000000-0000-0000-0000-000000000001'); /*!40000 ALTER TABLE "dataset_title" ENABLE KEYS */; --- Dumping structure for table public.invited_study_contributor -CREATE TABLE IF NOT EXISTS "invited_study_contributor" ( +-- Dumping structure for table public.invite +CREATE TABLE IF NOT EXISTS "invite" ( "email_address" VARCHAR NOT NULL, "permission" VARCHAR NOT NULL, "study_id" CHAR(36) NOT NULL, "invited_on" BIGINT NOT NULL, PRIMARY KEY ("email_address", "study_id"), - CONSTRAINT "invited_study_contributor_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION + CONSTRAINT "invite_study_id_fkey" FOREIGN KEY ("study_id") REFERENCES "study" ("id") ON UPDATE NO ACTION ON DELETE NO ACTION ); --- Dumping data for table 
public.invited_study_contributor: -1 rows -/*!40000 ALTER TABLE "invited_study_contributor" DISABLE KEYS */; -INSERT INTO "invited_study_contributor" ("email_address", "permission", "study_id", "invited_on") VALUES +-- Dumping data for table public.invite: -1 rows +/*!40000 ALTER TABLE "invite" DISABLE KEYS */; +INSERT INTO "invite" ("email_address", "permission", "study_id", "invited_on") VALUES ('Aliya_Herman@yahoo.com', 'editor', '00000000-0000-0000-0000-000000000001', 1693805470), ('Anastacio50@hotmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), ('Edward0@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000001', 1693805470), ('Jailyn17@gmail.com', 'viewer', '00000000-0000-0000-0000-000000000002', 1693805470); -/*!40000 ALTER TABLE "invited_study_contributor" ENABLE KEYS */; +/*!40000 ALTER TABLE "invite" ENABLE KEYS */; -- Dumping structure for table public.participant CREATE TABLE IF NOT EXISTS "participant" ( diff --git a/sql/specific_tables.sql b/sql/specific_tables.sql index 4147a2e5..ca90c0e8 100644 --- a/sql/specific_tables.sql +++ b/sql/specific_tables.sql @@ -46,7 +46,7 @@ INSERT INTO "dataset_contributor" ("id", "first_name", "last_name", "name_type", ('00000000-0000-0000-0000-000000000001', 'bhavesh', 'patel', 'type_name', 'identifier', 'scheme', 'scheme uri', 'true', 'type', '00000000-0000-0000-0000-000000000001'); -INSERT INTO "invited_study_contributor" ("email_address", "permission", "invited_on", "study_id") VALUES +INSERT INTO "invite" ("email_address", "permission", "invited_on", "study_id") VALUES ('aydan.gasimova@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000001'), ('bhavesh.patel@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000003'), ('sanjay.soundarajan@@gmail.com', 'owner', '2023-08-13 16:34:16', '00000000-0000-0000-0000-000000000004'); diff --git a/templates/accept_general_invitation.html b/templates/accept_general_invitation.html new file mode 
100644 index 00000000..68e3731f --- /dev/null +++ b/templates/accept_general_invitation.html @@ -0,0 +1,61 @@ + + + + + Invitation + + + +
+ +

Hello!

+
+

Congratulations! You're one step away from joining fairhub.io and discovering a whole new experience tailored just for you.

+

Please click the button to signup.

+ Signup + +

Or see here: + + {{ accept_url }} + +

+ +

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance. +

+ + +
+ + +
+ + + \ No newline at end of file diff --git a/templates/accept_study_invitation.html b/templates/accept_study_invitation.html new file mode 100644 index 00000000..d7aeecc0 --- /dev/null +++ b/templates/accept_study_invitation.html @@ -0,0 +1,57 @@ + + + + Invitation + + + +
+
+

Hello!

+
+

We are pleased to grant you {{ 'a' if role == "viewer" else "an"}} {{ role }} access to {{ study_name }}!

+

Your authorization enables you to delve into this specialized + study area and explore its curated resources.

+

Please click the button to accept the invitation.

+ Accept Invitation and Signup + +

Or see here: + + {{ accept_url }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+
+ diff --git a/templates/device_notification.html b/templates/device_notification.html new file mode 100644 index 00000000..a7e13890 --- /dev/null +++ b/templates/device_notification.html @@ -0,0 +1,61 @@ + + + + + Email verification + + + + +
+ +

Hello!

+
+

We detected you have logged in from {{ device_ip }}

+

If you do not recall this operation please click to change your password

+ Change Your Password +

+ Or see here: + http://localhost:3000/studies +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+ + + \ No newline at end of file diff --git a/templates/email_verification.html b/templates/email_verification.html new file mode 100644 index 00000000..eb575352 --- /dev/null +++ b/templates/email_verification.html @@ -0,0 +1,55 @@ + + + + + Email verification + + + + +
+ +
+

Hello {{ email}}!

+ +
+

Please verify your email address by clicking the button below:

+ Verify Email Address + +

Or see here: + + {{ verification_url }} +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+ + + \ No newline at end of file diff --git a/templates/invite_contributors.html b/templates/invite_contributors.html new file mode 100644 index 00000000..853b3bf6 --- /dev/null +++ b/templates/invite_contributors.html @@ -0,0 +1,67 @@ + + + + + Invitation + + + + +
+ +

Hello {{ ' ' ~ first_name if first_name else '' }}{{ ' ' ~ last_name if first_name else '' }}!

+
+

We are pleased to grant you {{ 'a' if role == "viewer" else "an"}} {{ role }} access to {{ study_name }}!

+

Your authorization enables you to delve into this specialized + study area and explore its curated resources.

+

Please click the button to accept the invitation:

+ View Study + +

Or see here: + + {{ accept_url }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at + help@fairhub.io + for prompt assistance. +

+
+
Sincerely,
+
FAIRhub team
+
+ + + \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index b2717e01..bccbd1ec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,7 +6,6 @@ import pytest from dotenv import load_dotenv - from app import create_app from model.db import db from pytest_config import TestConfig @@ -150,8 +149,24 @@ def _create_user(_test_client): "code": "", }, ) + assert response.status_code == 201 + + +@pytest.fixture() +def _verified_client(flask_app): + """Verify the user for testing.""" + with flask_app.test_client() as _test_client: + response = _test_client.post( + "/auth/email-verification/confirm", + json={ + "email": "test@fairhub.io", + "token": 1234567, + }, + ) assert response.status_code == 201 + response.close() + yield _test_client # Fixture to sign in the user for module testing @@ -186,7 +201,6 @@ def _test_invite_study_contributor(_logged_in_client): assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_editor_token = response_data["token"] response = _logged_in_client.post( @@ -255,10 +269,45 @@ def _create_viewer_user(flask_app): "code": pytest.global_viewer_token, }, ) - assert response.status_code == 201 +@pytest.fixture(scope="session") +def _user_verification_for_testing(flask_app): + """Create a viewer user for testing.""" + with flask_app.test_client() as _test_client: + with unittest.mock.patch("pytest_config.TestConfig", TestConfig): + a_response = _test_client.post( + "/auth/email-verification/confirm", + json={ + "email": "admin@fairhub.io", + "token": 1234567, + }, + ) + + assert a_response.status_code == 201 + + e_response = _test_client.post( + "/auth/email-verification/confirm", + json={ + "email": "editor@fairhub.io", + "token": 1234567, + }, + ) + + assert e_response.status_code == 201 + + v_response = _test_client.post( + "/auth/email-verification/confirm", + json={ + "email": "viewer@fairhub.io", + "token": 1234567, + }, + ) + + assert v_response.status_code == 201 + + 
@pytest.fixture(scope="session") def clients(flask_app): """Signs in all clients needed for testing""" diff --git a/tests/functional/test_server_launch.py b/tests/functional/test_010_server_launch.py similarity index 89% rename from tests/functional/test_server_launch.py rename to tests/functional/test_010_server_launch.py index c97fe78f..0c6fab97 100644 --- a/tests/functional/test_server_launch.py +++ b/tests/functional/test_010_server_launch.py @@ -27,6 +27,11 @@ def test_db_empty(_test_client, _empty_db, _create_user): print("User created for testing") +def test_verify_user(_verified_client): + """Verifies user before testing.""" + print("User verified for testing") + + def test_signin_user(_logged_in_client): """Signs in user before testing.""" print("User signed in for testing") diff --git a/tests/functional/test_study_api.py b/tests/functional/test_020_study_api.py similarity index 98% rename from tests/functional/test_study_api.py rename to tests/functional/test_020_study_api.py index 3e4eb71d..ec2975c7 100644 --- a/tests/functional/test_study_api.py +++ b/tests/functional/test_020_study_api.py @@ -50,6 +50,11 @@ def test_viewer_editor_user(_create_viewer_user): print("Viewer user created for testing") +def test_user_verification(_user_verification_for_testing): + """User are verified for testing""" + print("Users are verified for testing") + + def test_signin_all_clients(clients): """Signs in all clients for verifying permissions before testing continues.""" _logged_in_client, _admin_client, _editor_client, _viewer_client = clients diff --git a/tests/functional/test_study_dataset_api.py b/tests/functional/test_030_study_dataset_api.py similarity index 100% rename from tests/functional/test_study_dataset_api.py rename to tests/functional/test_030_study_dataset_api.py diff --git a/tests/functional/test_study_dataset_metadata_api.py b/tests/functional/test_040_study_dataset_metadata_api.py similarity index 99% rename from 
tests/functional/test_study_dataset_metadata_api.py rename to tests/functional/test_040_study_dataset_metadata_api.py index aad43499..e6a9d927 100644 --- a/tests/functional/test_study_dataset_metadata_api.py +++ b/tests/functional/test_040_study_dataset_metadata_api.py @@ -3145,7 +3145,6 @@ def test_get_dataset_related_identifier_metadata(clients): # assert len(admin_response_data) == 3 # assert len(editor_response_data) == 3 # assert len(viewer_response_data) == 3 - print(len(response_data), "lennnnnnnnnn") assert response_data[0]["identifier"] == "test identifier" assert response_data[0]["identifier_type"] == "test identifier type" assert response_data[0]["relation_type"] == "test relation type" diff --git a/tests/functional/test_study_metadata_api.py b/tests/functional/test_050_study_metadata_api.py similarity index 100% rename from tests/functional/test_study_metadata_api.py rename to tests/functional/test_050_study_metadata_api.py diff --git a/tests/functional/test_study_version_api.py b/tests/functional/test_060_study_version_api.py similarity index 100% rename from tests/functional/test_study_version_api.py rename to tests/functional/test_060_study_version_api.py diff --git a/tests/functional/test_user.py b/tests/functional/test_070_user.py similarity index 100% rename from tests/functional/test_user.py rename to tests/functional/test_070_user.py From 998ad3125f19da2fc22b58631aea4f161c108a6c Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 20 Feb 2025 13:49:24 -0800 Subject: [PATCH 482/505] fix: invitation location --- apis/authentication.py | 2 +- apis/contributor.py | 2 +- invitation/invitation.py => invitation.py | 0 3 files changed, 2 insertions(+), 2 deletions(-) rename invitation/invitation.py => invitation.py (100%) diff --git a/apis/authentication.py b/apis/authentication.py index 27202edc..07121e39 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -17,7 +17,7 @@ from flask_restx import Namespace, Resource, fields from jsonschema 
import FormatChecker, ValidationError, validate import model -from invitation.invitation import ( +from invitation import ( send_email_verification, ) diff --git a/apis/contributor.py b/apis/contributor.py index d1850bca..3cfd2695 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -6,7 +6,7 @@ from flask_restx import Namespace, Resource, fields import model -from invitation.invitation import send_access_contributors, send_invitation_study +from invitation import send_access_contributors, send_invitation_study from .authentication import is_granted diff --git a/invitation/invitation.py b/invitation.py similarity index 100% rename from invitation/invitation.py rename to invitation.py From 231ac98e857269b3a0d0906e220697c31f44a0d6 Mon Sep 17 00:00:00 2001 From: aydawka Date: Thu, 20 Feb 2025 14:59:54 -0800 Subject: [PATCH 483/505] fix: invitation issue --- apis/authentication.py | 2 +- apis/contributor.py | 2 +- modules/__init__.py | 5 +++++ invitation.py => modules/invitation.py | 0 4 files changed, 7 insertions(+), 2 deletions(-) rename invitation.py => modules/invitation.py (100%) diff --git a/apis/authentication.py b/apis/authentication.py index 07121e39..857755a8 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -17,7 +17,7 @@ from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate import model -from invitation import ( +from modules.invitation import ( send_email_verification, ) diff --git a/apis/contributor.py b/apis/contributor.py index 3cfd2695..3bda2364 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -6,7 +6,7 @@ from flask_restx import Namespace, Resource, fields import model -from invitation import send_access_contributors, send_invitation_study +from modules.invitation import send_access_contributors, send_invitation_study from .authentication import is_granted diff --git a/modules/__init__.py b/modules/__init__.py index 1bf7a639..f46dda20 100644 --- 
a/modules/__init__.py +++ b/modules/__init__.py @@ -1 +1,6 @@ from .etl import config, transforms, vtypes + +from .invitation import signin_notification +from .invitation import send_access_contributors +from .invitation import send_invitation_study +from .invitation import send_email_verification \ No newline at end of file diff --git a/invitation.py b/modules/invitation.py similarity index 100% rename from invitation.py rename to modules/invitation.py From 706fd63eb639033aa8fc7694a17ede7a35406d65 Mon Sep 17 00:00:00 2001 From: Aydan <62059163+Aydawka@users.noreply.github.com> Date: Thu, 20 Feb 2025 16:47:16 -0800 Subject: [PATCH 484/505] =?UTF-8?q?fix:=20=E2=9C=A8=20disable=20email=20ve?= =?UTF-8?q?rification=20(#61)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: comment out the verification logic * fix: comment out the testing * fix: comment out confirm and resent api --- apis/authentication.py | 126 +++++++++++---------- apis/contributor.py | 46 ++++---- model/user.py | 2 +- tests/conftest.py | 98 ++++++++-------- tests/functional/test_010_server_launch.py | 6 +- tests/functional/test_020_study_api.py | 6 +- 6 files changed, 146 insertions(+), 138 deletions(-) diff --git a/apis/authentication.py b/apis/authentication.py index 857755a8..eb7a301e 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -17,9 +17,9 @@ from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate import model -from modules.invitation import ( - send_email_verification, -) +# from modules.invitation import ( +# send_email_verification, +# ) api = Namespace("Authentication", description="Authentication paths", path="/") @@ -155,75 +155,83 @@ def validate_password(instance): verification = model.EmailVerification(new_user) new_user.email_verified = False - if os.environ.get("FLASK_ENV") == "testing": - verification.token = 1234567 + # '''enable once email verification 
is on''' + # if os.environ.get("FLASK_ENV") == "testing": + # verification.token = 1234567 model.db.session.add(new_user) model.db.session.add(verification) - if os.environ.get("FLASK_ENV") != "testing": - if new_user.email_address in bypassed_emails: - new_user.email_verified = True - - model.db.session.commit() - - if g.gb.is_on("email-verification"): - if os.environ.get("FLASK_ENV") != "testing": - if new_user.email_address not in bypassed_emails: - send_email_verification(new_user.email_address, verification.token) - return f"Hi, {new_user.email_address}, you have successfully signed up", 201 - -@api.route("/auth/email-verification/confirm") -class EmailVerification(Resource): - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(contributors_model) - def post(self): - data: Union[Any, dict] = request.json - if "token" not in data or "email" not in data: - return "email or token are required", 422 - user = model.User.query.filter_by(email_address=data["email"]).one_or_none() - if not user: - return "user not found", 404 - if user.email_verified: - return "user already verified", 422 - if os.environ.get("FLASK_ENV") != "testing": - if not user.verify_token(data["token"]): - return "Token invalid or expired", 422 - user.email_verified = True + new_user.email_verified = True + # '''When /confirm endpoint will be enabled, this logic will be moved there + # since users can not be a study contributor without email verification + # set to true, and this can happen only there''' invitations = model.StudyInvitedContributor.query.filter_by( - email_address=data["email"] + email_address=data["email_address"] ).all() for invite in invitations: - invite.study.add_user_to_study(user, invite.permission) + invite.study.add_user_to_study(new_user, invite.permission) model.db.session.delete(invite) model.db.session.commit() - return "Email verified", 201 + # """When the email verification functionality fully enabled these + # lines will 
be commented out and email will not be verified without email verification.""" + # if os.environ.get("FLASK_ENV") != "testing": + # if new_user.email_address in bypassed_emails: + # new_user.email_verified = True + # if g.gb.is_on("email-verification"): + # if os.environ.get("FLASK_ENV") != "testing": + # if new_user.email_address not in bypassed_emails: + # send_email_verification(new_user.email_address, verification.token) -@api.route("/auth/email-verification/resend") -class GenerateVerification(Resource): - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(contributors_model) - def post(self): - data: Union[Any, dict] = request.json - user = model.User.query.filter_by(email_address=data["email"]).one_or_none() - if not user: - return "user not found", 404 - if user.email_verified: - return "user already verified", 422 - - # user.email_verified = True - token = user.generate_token() - - if g.gb.is_on("email-verification"): - if os.environ.get("FLASK_ENV") != "testing": - send_email_verification(user.email_address, token) + return f"Hi, {new_user.email_address}, you have successfully signed up", 201 - model.db.session.commit() - return "Your email is verified", 201 +# @api.route("/auth/email-verification/confirm") +# class EmailVerification(Resource): +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(contributors_model) +# def post(self): +# data: Union[Any, dict] = request.json +# if "token" not in data or "email" not in data: +# return "email or token are required", 422 +# user = model.User.query.filter_by(email_address=data["email"]).one_or_none() +# if not user: +# return "user not found", 404 +# if user.email_verified: +# return "user already verified", 422 +# if os.environ.get("FLASK_ENV") != "testing": +# if not user.verify_token(data["token"]): +# return "Token invalid or expired", 422 +# user.email_verified = True +# +# model.db.session.commit() +# return "Email 
verified", 201 +# + +# @api.route("/auth/email-verification/resend") +# class GenerateVerification(Resource): +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(contributors_model) +# def post(self): +# data: Union[Any, dict] = request.json +# user = model.User.query.filter_by(email_address=data["email"]).one_or_none() +# if not user: +# return "user not found", 404 +# if user.email_verified: +# return "user already verified", 422 +# +# # user.email_verified = True +# # token = user.generate_token() +# +# # if g.gb.is_on("email-verification"): +# # if os.environ.get("FLASK_ENV") != "testing": +# # send_email_verification(user.email_address, token) +# +# model.db.session.commit() +# return "Your email is verified", 201 @api.route("/auth/email-verification/check") diff --git a/apis/contributor.py b/apis/contributor.py index 3bda2364..a9ae4fa3 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,12 +1,12 @@ from collections import OrderedDict from typing import Any, Dict, List, Union -import os +# import os from flask import Response, g, request from flask_restx import Namespace, Resource, fields import model -from modules.invitation import send_access_contributors, send_invitation_study +# from modules.invitation import send_access_contributors, send_invitation_study from .authentication import is_granted @@ -49,10 +49,10 @@ def post(self, study_id: int): email_address = data["email_address"] user = model.User.query.filter_by(email_address=email_address).first() permission = data["role"] - contributor_ = None - study_name = study_obj.title - first_name = user.user_details.first_name if user else "" - last_name = user.user_details.last_name if user else "" + # contributor_ = None + # study_name = study_obj.title + # first_name = user.user_details.first_name if user else "" + # last_name = user.user_details.last_name if user else "" try: if user: contributor_ = study_obj.add_user_to_study(user, permission) @@ 
-62,23 +62,23 @@ def post(self, study_id: int): except model.StudyException as ex: return ex.args[0], 409 model.db.session.commit() - if os.environ.get("FLASK_ENV") != "testing": - if g.gb.is_on("email-verification"): - if user: - send_access_contributors( - email_address, - study_obj, - first_name, - last_name, - contributor_.permission, - ) - else: - send_invitation_study( - email_address, - contributor_.token, - study_name, - contributor_.permission, - ) + # if os.environ.get("FLASK_ENV") != "testing": + # if g.gb.is_on("email-verification"): + # if user: + # send_access_contributors( + # email_address, + # study_obj, + # first_name, + # last_name, + # contributor_.permission, + # ) + # else: + # send_invitation_study( + # email_address, + # contributor_.token, + # study_name, + # contributor_.permission, + # ) return contributor_.to_dict(), 201 diff --git a/model/user.py b/model/user.py index 8215fb7f..42a79320 100644 --- a/model/user.py +++ b/model/user.py @@ -15,7 +15,7 @@ def __init__(self, password): self.created_at = datetime.datetime.now(datetime.timezone.utc).timestamp() self.set_password(password) self.user_details = model.UserDetails(self) - self.email_verified = False + self.email_verified = True db.Column(db.BigInteger, nullable=False) __tablename__ = "user" diff --git a/tests/conftest.py b/tests/conftest.py index bccbd1ec..a27e7009 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -152,21 +152,21 @@ def _create_user(_test_client): assert response.status_code == 201 -@pytest.fixture() -def _verified_client(flask_app): - """Verify the user for testing.""" - - with flask_app.test_client() as _test_client: - response = _test_client.post( - "/auth/email-verification/confirm", - json={ - "email": "test@fairhub.io", - "token": 1234567, - }, - ) - assert response.status_code == 201 - response.close() - yield _test_client +# @pytest.fixture() +# def _verified_client(flask_app): +# """Verify the user for testing.""" +# +# with flask_app.test_client() 
as _test_client: +# response = _test_client.post( +# "/auth/email-verification/confirm", +# json={ +# "email": "test@fairhub.io", +# "token": 1234567, +# }, +# ) +# assert response.status_code == 201 +# response.close() +# yield _test_client # Fixture to sign in the user for module testing @@ -272,40 +272,40 @@ def _create_viewer_user(flask_app): assert response.status_code == 201 -@pytest.fixture(scope="session") -def _user_verification_for_testing(flask_app): - """Create a viewer user for testing.""" - with flask_app.test_client() as _test_client: - with unittest.mock.patch("pytest_config.TestConfig", TestConfig): - a_response = _test_client.post( - "/auth/email-verification/confirm", - json={ - "email": "admin@fairhub.io", - "token": 1234567, - }, - ) - - assert a_response.status_code == 201 - - e_response = _test_client.post( - "/auth/email-verification/confirm", - json={ - "email": "editor@fairhub.io", - "token": 1234567, - }, - ) - - assert e_response.status_code == 201 - - v_response = _test_client.post( - "/auth/email-verification/confirm", - json={ - "email": "viewer@fairhub.io", - "token": 1234567, - }, - ) - - assert v_response.status_code == 201 +# @pytest.fixture(scope="session") +# def _user_verification_for_testing(flask_app): +# """Create a viewer user for testing.""" +# with flask_app.test_client() as _test_client: +# with unittest.mock.patch("pytest_config.TestConfig", TestConfig): +# a_response = _test_client.post( +# "/auth/email-verification/confirm", +# json={ +# "email": "admin@fairhub.io", +# "token": 1234567, +# }, +# ) +# +# assert a_response.status_code == 201 +# +# e_response = _test_client.post( +# "/auth/email-verification/confirm", +# json={ +# "email": "editor@fairhub.io", +# "token": 1234567, +# }, +# ) +# +# assert e_response.status_code == 201 +# +# v_response = _test_client.post( +# "/auth/email-verification/confirm", +# json={ +# "email": "viewer@fairhub.io", +# "token": 1234567, +# }, +# ) +# +# assert v_response.status_code == 
201 @pytest.fixture(scope="session") diff --git a/tests/functional/test_010_server_launch.py b/tests/functional/test_010_server_launch.py index 0c6fab97..66c23759 100644 --- a/tests/functional/test_010_server_launch.py +++ b/tests/functional/test_010_server_launch.py @@ -27,9 +27,9 @@ def test_db_empty(_test_client, _empty_db, _create_user): print("User created for testing") -def test_verify_user(_verified_client): - """Verifies user before testing.""" - print("User verified for testing") +# def test_verify_user(_verified_client): +# """Verifies user before testing.""" +# print("User verified for testing") def test_signin_user(_logged_in_client): diff --git a/tests/functional/test_020_study_api.py b/tests/functional/test_020_study_api.py index ec2975c7..f1a52c85 100644 --- a/tests/functional/test_020_study_api.py +++ b/tests/functional/test_020_study_api.py @@ -50,9 +50,9 @@ def test_viewer_editor_user(_create_viewer_user): print("Viewer user created for testing") -def test_user_verification(_user_verification_for_testing): - """User are verified for testing""" - print("Users are verified for testing") +# def test_user_verification(_user_verification_for_testing): +# """User are verified for testing""" +# print("Users are verified for testing") def test_signin_all_clients(clients): From e37882bf42f8a5a4d87f465a41686f1a98006583 Mon Sep 17 00:00:00 2001 From: Aydan <62059163+Aydawka@users.noreply.github.com> Date: Thu, 6 Mar 2025 15:15:47 -0800 Subject: [PATCH 485/505] refactor: study metadata pages (#62) * fix: format * fix: team api * fix: comment out unused api * fix: description * fix: identification * wip: test * wip: test team api * wip: test description page * ci: update workflow * ci: update workflow * style: format fix * fix: minlength rule * fix: uncomment testing * fix: uncomment testing * fix: study metadata * fix: version study metadata tedting * fix: minor issues --- .github/workflows/lint.yml | 2 +- .github/workflows/test.yml | 2 +- apis/__init__.py | 
3 +- apis/authentication.py | 14 +- apis/contributor.py | 13 +- apis/study_metadata/study_collaborators.py | 157 +- apis/study_metadata/study_conditions.py | 140 +- apis/study_metadata/study_description.py | 180 +- apis/study_metadata/study_identification.py | 229 +- apis/study_metadata/study_keywords.py | 141 +- apis/study_metadata/study_sponsors.py | 164 -- apis/study_metadata/study_team.py | 231 ++ app.py | 17 +- model/__init__.py | 4 +- tests/conftest.py | 15 +- .../functional/test_050_study_metadata_api.py | 2485 ++++++++--------- .../functional/test_060_study_version_api.py | 145 +- tests/functional/test_070_user.py | 22 +- 18 files changed, 2056 insertions(+), 1908 deletions(-) delete mode 100644 apis/study_metadata/study_sponsors.py create mode 100644 apis/study_metadata/study_team.py diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 29f12739..b071c8cc 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -23,7 +23,7 @@ jobs: - name: Install dependencies run: poetry install --no-root - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: .venv key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 80c95a55..7f449e4f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -31,7 +31,7 @@ jobs: - name: Install dependencies run: poetry install --no-root - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: .venv key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }} diff --git a/apis/__init__.py b/apis/__init__.py index caefd02b..399a909f 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -1,6 +1,7 @@ """Initialize the api system for the backend""" from flask_restx import Api, Resource + from apis.dataset_metadata_namespace import api as dataset_metadata_namespace from apis.study_metadata_namespace import api as study_metadata_namespace @@ -42,7 +43,7 @@ from 
.study_metadata.study_location import api as location from .study_metadata.study_overall_official import api as overall_official from .study_metadata.study_oversight import api as oversight -from .study_metadata.study_sponsors import api as sponsors +from .study_metadata.study_team import api as sponsors from .study_metadata.study_status import api as status from .user import api as user from .utils import api as utils diff --git a/apis/authentication.py b/apis/authentication.py index eb7a301e..fc6f5da3 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -6,17 +6,19 @@ import importlib import os import re +import time import uuid from datetime import timezone from typing import Any, Union -import time import jwt from email_validator import EmailNotValidError, validate_email from flask import g, make_response, request from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate + import model + # from modules.invitation import ( # send_email_verification, # ) @@ -187,6 +189,7 @@ def validate_password(instance): return f"Hi, {new_user.email_address}, you have successfully signed up", 201 + # @api.route("/auth/email-verification/confirm") # class EmailVerification(Resource): # @api.response(200, "Success") @@ -336,7 +339,6 @@ def validate_is_valid_email(instance): "user": user.id, "exp": expired_in, "jti": jti, - }, # noqa: W503 config.FAIRHUB_SECRET, algorithm="HS256", @@ -555,11 +557,9 @@ def post(self): resp.status_code = 204 if g.user and g.token: - remove_session = ( - model.Session.query - .filter(model.Session.id == g.token) - .first() - ) + remove_session = model.Session.query.filter( + model.Session.id == g.token + ).first() if remove_session: model.db.session.delete(remove_session) model.db.session.commit() diff --git a/apis/contributor.py b/apis/contributor.py index a9ae4fa3..11a5ab42 100644 --- a/apis/contributor.py +++ b/apis/contributor.py @@ -1,15 +1,19 @@ from collections import 
OrderedDict from typing import Any, Dict, List, Union -# import os from flask import Response, g, request from flask_restx import Namespace, Resource, fields import model -# from modules.invitation import send_access_contributors, send_invitation_study from .authentication import is_granted +# import os + + +# from modules.invitation import send_access_contributors, send_invitation_study + + api = Namespace("Contributor", description="Contributors", path="/") @@ -29,7 +33,9 @@ class AllContributors(Resource): # @api.marshal_with(contributors_model) def get(self, study_id: int): contributors = model.StudyContributor.query.filter_by(study_id=study_id).all() - invited_contributors = model.StudyInvitedContributor.query.filter_by(study_id=study_id).all() + invited_contributors = model.StudyInvitedContributor.query.filter_by( + study_id=study_id + ).all() contributors_list = [c.to_dict() for c in contributors] + [ c.to_dict() for c in invited_contributors @@ -40,7 +46,6 @@ def get(self, study_id: int): @api.response(400, "Validation Error") # @api.marshal_with(contributors_model) def post(self, study_id: int): - study_obj = model.Study.query.get(study_id) if not is_granted("invite_contributor", study_obj): return "Access denied, you can not modify study", 403 diff --git a/apis/study_metadata/study_collaborators.py b/apis/study_metadata/study_collaborators.py index 018f3062..296a5acb 100644 --- a/apis/study_metadata/study_collaborators.py +++ b/apis/study_metadata/study_collaborators.py @@ -1,90 +1,89 @@ """API routes for study collaborators metadata""" -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +from flask import Response +from flask_restx import Resource import model from apis.study_metadata_namespace import api from ..authentication import is_granted -study_collaborators = api.model( - "StudyCollaborators", - { - "id": fields.String(required=True), - "name": 
fields.String(required=True), - "identifier": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/collaborators") -class StudyCollaboratorsResource(Resource): - """Study Collaborators Metadata""" - - @api.doc("collaborators") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_collaborators) - def get(self, study_id: int): - """Get study collaborators metadata""" - study_ = model.Study.query.get(study_id) - study_collaborators_ = study_.study_collaborators - - return [collab.to_dict() for collab in study_collaborators_], 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """updating study collaborators""" - # Schema validation - schema = { - "type": "array", - "additionalProperties": False, - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string"}, - "identifier": {"type": "string"}, - "identifier_scheme": {"type": "string"}, - "identifier_scheme_uri": {"type": "string"}, - }, - "required": [ - "name", - "identifier", - "identifier_scheme", - ], - }, - } - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_collaborators_ = model.StudyCollaborators.query.get(i["id"]) - study_collaborators_.update(i) - else: - study_collaborators_ = model.StudyCollaborators.from_data(study_obj, i) - model.db.session.add(study_collaborators_) - list_of_elements.append(study_collaborators_.to_dict()) - model.db.session.commit() - - return list_of_elements, 201 +# +# study_collaborators = 
api.model( +# "StudyCollaborators", +# { +# "id": fields.String(required=True), +# "name": fields.String(required=True), +# "identifier": fields.String(required=True), +# "scheme": fields.String(required=True), +# "scheme_uri": fields.String(required=True), +# }, +# ) + +# +# @api.route("/study//metadata/collaborators") +# class StudyCollaboratorsResource(Resource): +# """Study Collaborators Metadata""" +# +# @api.doc("collaborators") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(study_collaborators) +# def get(self, study_id: int): +# """Get study collaborators metadata""" +# study_ = model.Study.query.get(study_id) +# study_collaborators_ = study_.study_collaborators +# +# return [collab.to_dict() for collab in study_collaborators_], 200 +# +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def post(self, study_id: int): +# """updating study collaborators""" +# # Schema validation +# schema = { +# "type": "array", +# "additionalProperties": False, +# "items": { +# "type": "object", +# "properties": { +# "id": {"type": "string"}, +# "name": {"type": "string"}, +# "identifier": {"type": "string"}, +# "identifier_scheme": {"type": "string"}, +# "identifier_scheme_uri": {"type": "string"}, +# }, +# "required": [ +# "name", +# "identifier", +# "identifier_scheme", +# ], +# }, +# } +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 +# +# data: typing.Union[dict, typing.Any] = request.json +# +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not modify study", 403 +# +# list_of_elements = [] +# for i in data: +# if "id" in i and i["id"]: +# study_collaborators_ = model.StudyCollaborators.query.get(i["id"]) +# study_collaborators_.update(i) +# else: +# study_collaborators_ = model.StudyCollaborators.from_data(study_obj, i) +# 
model.db.session.add(study_collaborators_) +# list_of_elements.append(study_collaborators_.to_dict()) +# model.db.session.commit() +# +# return list_of_elements, 201 +# @api.route("/study//metadata/collaborators/") diff --git a/apis/study_metadata/study_conditions.py b/apis/study_metadata/study_conditions.py index c0dfcfcb..c812a07d 100644 --- a/apis/study_metadata/study_conditions.py +++ b/apis/study_metadata/study_conditions.py @@ -1,89 +1,73 @@ """API routes for study other metadata""" -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +from flask import Response +from flask_restx import Resource import model from apis.study_metadata_namespace import api from ..authentication import is_granted -study_other = api.model( - "StudyConditions", - { - "id": fields.String(required=True), - "name": fields.Boolean(required=True), - "classification_code": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "condition_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/conditions") -class StudyCondition(Resource): - """Study Conditions Metadata""" - - @api.doc("conditions") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study conditions metadata""" - study_ = model.Study.query.get(study_id) - - study_conditions = study_.study_conditions - - return [s.to_dict() for s in study_conditions], 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """Create study condition metadata""" - # Schema validation - schema = { - "type": "array", - "additionalProperties": False, - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "classification_code": {"type": 
"string"}, - "scheme": {"type": "string"}, - "scheme_uri": {"type": "string"}, - "condition_uri": {"type": "string"}, - }, - "required": ["name", "classification_code", "condition_uri"], - }, - } - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - data: typing.Union[dict, typing.Any] = request.json - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_conditions_ = model.StudyConditions.query.get(i["id"]) - if not study_conditions_: - return f"Study condition {i['id']} Id is not found", 404 - study_conditions_.update(i) - list_of_elements.append(study_conditions_.to_dict()) - elif "id" not in i or not i["id"]: - study_conditions_ = model.StudyConditions.from_data(study_obj, i) - model.db.session.add(study_conditions_) - list_of_elements.append(study_conditions_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 +# @api.route("/study//metadata/conditions") +# class StudyCondition(Resource): +# """Study Conditions Metadata""" +# +# @api.doc("conditions") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(study_other) +# def get(self, study_id: int): +# """Get study conditions metadata""" +# study_ = model.Study.query.get(study_id) +# +# study_conditions = study_.study_conditions +# +# return [s.to_dict() for s in study_conditions], 200 +# +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def post(self, study_id: int): +# """Create study condition metadata""" +# # Schema validation +# schema = { +# "type": "array", +# "additionalProperties": False, +# "items": { +# "type": "object", +# "properties": { +# "id": {"type": "string"}, +# "name": {"type": "string", "minLength": 1}, +# "classification_code": {"type": "string"}, +# "scheme": {"type": "string"}, +# 
"scheme_uri": {"type": "string"}, +# "condition_uri": {"type": "string"}, +# }, +# "required": ["name", "classification_code", "condition_uri"], +# }, +# } +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not modify study", 403 +# +# data: typing.Union[dict, typing.Any] = request.json +# list_of_elements = [] +# for i in data: +# if "id" in i and i["id"]: +# study_conditions_ = model.StudyConditions.query.get(i["id"]) +# if not study_conditions_: +# return f"Study condition {i['id']} Id is not found", 404 +# study_conditions_.update(i) +# list_of_elements.append(study_conditions_.to_dict()) +# elif "id" not in i or not i["id"]: +# study_conditions_ = model.StudyConditions.from_data(study_obj, i) +# model.db.session.add(study_conditions_) +# list_of_elements.append(study_conditions_.to_dict()) +# model.db.session.commit() +# return list_of_elements, 201 @api.route("/study//metadata/conditions/") diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 0c181dee..a47ca2b2 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -1,5 +1,7 @@ """API routes for study description metadata""" +import typing + from flask import request from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -18,6 +20,43 @@ }, ) +study_other = api.model( + "StudyConditions", + { + "id": fields.String(required=True), + "name": fields.Boolean(required=True), + "classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "condition_uri": fields.String(required=True), + }, +) + +study_keywords = api.model( + "StudyKeywords", + { + "id": fields.String(required=True), + "name": fields.Boolean(required=True), + 
"classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "keyword_uri": fields.String(required=True), + }, +) + + +study_identification = api.model( + "StudyIdentification", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "identifier_domain": fields.String(required=True), + "identifier_link": fields.String(required=True), + "secondary": fields.Boolean(required=True), + }, +) + @api.route("/study//metadata/description") class StudyDescriptionResource(Resource): @@ -26,31 +65,89 @@ class StudyDescriptionResource(Resource): @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(study_description) + # @api.marshal_with(study_description) def get(self, study_id: int): """Get study description metadata""" study_ = model.Study.query.get(study_id) - + identifiers = model.Identifiers(study_) + study_keywords = study_.study_keywords + study_conditions = study_.study_conditions study_description_ = study_.study_description - - return study_description_.to_dict(), 200 + return { + "identification": identifiers.to_dict(), + "keywords": [k.to_dict() for k in study_keywords], + "conditions": [c.to_dict() for c in study_conditions], + "description": study_description_.to_dict(), + }, 200 @api.response(200, "Success") @api.response(400, "Validation Error") - def put(self, study_id: int): + def post(self, study_id: int): """Update study description metadata""" - study_obj = model.Study.query.get(study_id) # Schema validation schema = { "type": "object", "additionalProperties": False, + "required": [], "properties": { - "brief_summary": {"type": "string", "minLength": 1}, - "detailed_description": { - "type": "string", + "conditions": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": 
"string", "minLength": 1}, + "classification_code": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "condition_uri": {"type": "string"}, + }, + "required": ["name", "classification_code", "condition_uri"], + "additionalProperties": False, + }, + }, + "keywords": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "classification_code": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "keyword_uri": {"type": "string"}, + }, + "required": ["name", "classification_code", "keyword_uri"], + "additionalProperties": False, + }, + }, + "identification": { + "type": "object", + "additionalProperties": False, + "properties": { + "primary": { + "type": "object", + "additionalProperties": False, + "properties": { + "identifier": {"type": "string"}, + "identifier_type": {"type": "string", "minLength": 1}, + "identifier_domain": {"type": "string"}, + "identifier_link": {"type": "string"}, + }, + }, + "secondary": {"type": "array"}, + }, + }, + "description": { + "type": "object", + "additionalProperties": False, + "properties": { + "brief_summary": {"type": "string"}, + "detailed_description": {"type": "string"}, + }, }, }, - "required": ["brief_summary", "detailed_description"], } try: @@ -59,12 +156,69 @@ def put(self, study_id: int): return e.message, 400 study_obj = model.Study.query.get(study_id) + data: typing.Union[dict, typing.Any] = request.json + if not is_granted("study_metadata", study_obj): return "Access denied, you can not modify study", 403 - study_ = model.Study.query.get(study_id) + study_obj.study_description.update(data["description"]) + + list_of_keywords = [] + for i in data["keywords"]: + if "id" in i and i["id"]: + study_keywords_ = model.StudyKeywords.query.get(i["id"]) + if not study_keywords_: + return f"Study keywords {i['id']} Id is not found", 404 + 
study_keywords_.update(i) + list_of_keywords.append(study_keywords_.to_dict()) + elif "id" not in i or not i["id"]: + study_keywords_ = model.StudyKeywords.from_data(study_obj, i) + model.db.session.add(study_keywords_) + list_of_keywords.append(study_keywords_.to_dict()) + + list_of_conditions = [] + for i in data["conditions"]: + if "id" in i and i["id"]: + study_conditions_ = model.StudyConditions.query.get(i["id"]) + if not study_conditions_: + return f"Study condition {i['id']} Id is not found", 404 + study_conditions_.update(i) + list_of_conditions.append(study_conditions_.to_dict()) + elif "id" not in i or not i["id"]: + study_conditions_ = model.StudyConditions.from_data(study_obj, i) + model.db.session.add(study_conditions_) + list_of_conditions.append(study_conditions_.to_dict()) + + identifiers = [i for i in study_obj.study_identification if not i.secondary] + primary_identifier = identifiers[0] if len(identifiers) else None - study_.study_description.update(request.json) + primary: dict = data["identification"]["primary"] + + if primary_identifier: + primary_identifier.update(primary) + else: + study_identification_ = model.StudyIdentification.from_data( + study_obj, primary, False + ) + model.db.session.add(study_identification_) + + for i in data["identification"]["secondary"]: + i["secondary"] = True + if "id" in i and i["id"]: + study_identification_ = model.StudyIdentification.query.get(i["id"]) + study_identification_.update(i) + else: + study_identification_ = model.StudyIdentification.from_data( + study_obj, i, True + ) + model.db.session.add(study_identification_) model.db.session.commit() - return study_.study_description.to_dict(), 200 + final_identifiers = model.Identifiers(study_obj) + + return { + "description": study_obj.study_description.to_dict(), + "conditions": list_of_conditions, + "keywords": list_of_keywords, + "identification": final_identifiers.to_dict(), + }, 201 diff --git a/apis/study_metadata/study_identification.py 
b/apis/study_metadata/study_identification.py index 01a3a0fa..2ff70816 100644 --- a/apis/study_metadata/study_identification.py +++ b/apis/study_metadata/study_identification.py @@ -1,138 +1,121 @@ """API routes for study identification metadata""" -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +from flask import Response +from flask_restx import Resource import model from apis.study_metadata_namespace import api from ..authentication import is_granted -study_identification = api.model( - "StudyIdentification", - { - "id": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_type": fields.String(required=True), - "identifier_domain": fields.String(required=True), - "identifier_link": fields.String(required=True), - "secondary": fields.Boolean(required=True), - }, -) - - -@api.route("/study//metadata/identification") -class StudyIdentificationResource(Resource): +# @api.route("/study//metadata/identification") +# class StudyIdentificationResource(Resource): +# """Study Identification Metadata""" +# +# @api.doc("identification") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.param("id", "The study identifier") +# # @api.marshal_with(study_identification) +# def get(self, study_id: int): +# """Get study identification metadata""" +# study_ = model.Study.query.get(study_id) +# identifiers = model.Identifiers(study_) +# return identifiers.to_dict(), 200 +# +# @api.doc("identification add") +# @api.response(201, "Success") +# @api.response(400, "Validation Error") +# @api.expect(study_identification) +# def post(self, study_id: int): +# """Create study identification metadata""" +# # Schema validation +# schema = { +# "type": "object", +# "additionalProperties": False, +# "properties": { +# "primary": { +# "type": "object", +# "additionalProperties": False, +# "properties": { +# "identifier": 
{"type": "string", "minLength": 1}, +# "identifier_type": { +# "type": "string", +# "minLength": 1, +# }, +# "identifier_domain": { +# "type": "string", +# }, +# "identifier_link": { +# "type": "string", +# }, +# }, +# }, +# "secondary": { +# "type": "array", +# }, +# }, +# } +# +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 +# +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not modify study", 403 +# +# data: typing.Union[dict, typing.Any] = request.json +# identifiers = [i for i in study_obj.study_identification if not i.secondary] +# primary_identifier = identifiers[0] if len(identifiers) else None +# +# primary: dict = data["primary"] +# +# if primary_identifier: +# primary_identifier.update(primary) +# else: +# study_identification_ = model.StudyIdentification.from_data( +# study_obj, primary, False +# ) +# model.db.session.add(study_identification_) +# +# for i in data["secondary"]: +# i["secondary"] = True +# if "id" in i and i["id"]: +# study_identification_ = model.StudyIdentification.query.get(i["id"]) +# study_identification_.update(i) +# else: +# study_identification_ = model.StudyIdentification.from_data( +# study_obj, i, True +# ) +# model.db.session.add(study_identification_) +# +# model.db.session.commit() +# +# final_identifiers = model.Identifiers(study_obj) +# +# return final_identifiers.to_dict(), 201 + + +@api.route("/study//metadata/identification/") +class StudyIdentificationdDelete(Resource): """Study Identification Metadata""" - @api.doc("identification") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The study identifier") - # @api.marshal_with(study_identification) - def get(self, study_id: int): - """Get study identification metadata""" - study_ = model.Study.query.get(study_id) - identifiers = model.Identifiers(study_) - return identifiers.to_dict(), 200 - 
- @api.doc("identification add") - @api.response(201, "Success") + @api.doc("Delete Study Identifications") + @api.response(204, "Success") @api.response(400, "Validation Error") - @api.expect(study_identification) - def post(self, study_id: int): - """Create study identification metadata""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "primary": { - "type": "object", - "additionalProperties": False, - "properties": { - "identifier": {"type": "string", "minLength": 1}, - "identifier_type": { - "type": "string", - "minLength": 1, - }, - "identifier_domain": { - "type": "string", - }, - "identifier_link": { - "type": "string", - }, - }, - }, - "secondary": { - "type": "array", - }, - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - data: typing.Union[dict, typing.Any] = request.json - identifiers = [i for i in study_obj.study_identification if not i.secondary] - primary_identifier = identifiers[0] if len(identifiers) else None + def delete(self, study_id: int, identification_id: int): + """Delete study identification metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 - primary: dict = data["primary"] - - if primary_identifier: - primary_identifier.update(primary) - else: - study_identification_ = model.StudyIdentification.from_data( - study_obj, primary, False - ) - model.db.session.add(study_identification_) - - for i in data["secondary"]: - i["secondary"] = True - if "id" in i and i["id"]: - study_identification_ = model.StudyIdentification.query.get(i["id"]) - study_identification_.update(i) - else: - study_identification_ = model.StudyIdentification.from_data( - study_obj, i, True - ) - 
model.db.session.add(study_identification_) + study_identification_ = model.StudyIdentification.query.get(identification_id) + if not study_identification_.secondary: + return "primary identifier can not be deleted", 400 + model.db.session.delete(study_identification_) model.db.session.commit() - final_identifiers = model.Identifiers(study_obj) - - return final_identifiers.to_dict(), 201 - - @api.route("/study//metadata/identification/") - class StudyIdentificationdUpdate(Resource): - """Study Identification Metadata""" - - @api.doc("Delete Study Identifications") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, identification_id: int): - """Delete study identification metadata""" - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - - study_identification_ = model.StudyIdentification.query.get( - identification_id - ) - if not study_identification_.secondary: - return "primary identifier can not be deleted", 400 - - model.db.session.delete(study_identification_) - model.db.session.commit() - - return Response(status=204) + return Response(status=204) diff --git a/apis/study_metadata/study_keywords.py b/apis/study_metadata/study_keywords.py index 4e6c420b..27f90ea7 100644 --- a/apis/study_metadata/study_keywords.py +++ b/apis/study_metadata/study_keywords.py @@ -1,89 +1,74 @@ """API routes for study other metadata""" -import typing - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate +from flask import Response +from flask_restx import Resource import model from apis.study_metadata_namespace import api from ..authentication import is_granted -study_keywords = api.model( - "StudyKeywords", - { - "id": fields.String(required=True), - "name": fields.Boolean(required=True), - "classification_code": fields.String(required=True), - "scheme": 
fields.String(required=True), - "scheme_uri": fields.String(required=True), - "keyword_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/keywords") -class StudyKeywords(Resource): - """Study Keywords Metadata""" - - @api.doc("keywords") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(study_other) - def get(self, study_id: int): - """Get study keywords metadata""" - study_ = model.Study.query.get(study_id) - study_keywords = study_.study_keywords - - return [k.to_dict() for k in study_keywords], 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int): - """Create study keywords metadata""" - # Schema validation - schema = { - "type": "array", - "additionalProperties": False, - "items": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "classification_code": {"type": "string"}, - "scheme": {"type": "string"}, - "scheme_uri": {"type": "string"}, - "keyword_uri": {"type": "string"}, - }, - "required": ["name", "classification_code", "keyword_uri"], - }, - } - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not modify study", 403 - - data: typing.Union[dict, typing.Any] = request.json - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - study_keywords_ = model.StudyKeywords.query.get(i["id"]) - if not study_keywords_: - return f"Study keywords {i['id']} Id is not found", 404 - study_keywords_.update(i) - list_of_elements.append(study_keywords_.to_dict()) - elif "id" not in i or not i["id"]: - study_keywords_ = model.StudyKeywords.from_data(study_obj, i) - model.db.session.add(study_keywords_) - list_of_elements.append(study_keywords_.to_dict()) - model.db.session.commit() - return 
list_of_elements, 201 +# +# @api.route("/study//metadata/keywords") +# class StudyKeywords(Resource): +# """Study Keywords Metadata""" +# +# @api.doc("keywords") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# # @api.marshal_with(study_other) +# def get(self, study_id: int): +# """Get study keywords metadata""" +# study_ = model.Study.query.get(study_id) +# study_keywords = study_.study_keywords +# +# return [k.to_dict() for k in study_keywords], 200 +# +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def post(self, study_id: int): +# """Create study keywords metadata""" +# # Schema validation +# schema = { +# "type": "array", +# "additionalProperties": False, +# "items": { +# "type": "object", +# "properties": { +# "id": {"type": "string"}, +# "name": {"type": "string", "minLength": 1}, +# "classification_code": {"type": "string"}, +# "scheme": {"type": "string"}, +# "scheme_uri": {"type": "string"}, +# "keyword_uri": {"type": "string"}, +# }, +# "required": ["name", "classification_code", "keyword_uri"], +# }, +# } +# try: +# validate(request.json, schema) +# except ValidationError as e: +# return e.message, 400 +# +# study_obj = model.Study.query.get(study_id) +# if not is_granted("study_metadata", study_obj): +# return "Access denied, you can not modify study", 403 +# +# data: typing.Union[dict, typing.Any] = request.json +# list_of_elements = [] +# for i in data: +# if "id" in i and i["id"]: +# study_keywords_ = model.StudyKeywords.query.get(i["id"]) +# if not study_keywords_: +# return f"Study keywords {i['id']} Id is not found", 404 +# study_keywords_.update(i) +# list_of_elements.append(study_keywords_.to_dict()) +# elif "id" not in i or not i["id"]: +# study_keywords_ = model.StudyKeywords.from_data(study_obj, i) +# model.db.session.add(study_keywords_) +# list_of_elements.append(study_keywords_.to_dict()) +# model.db.session.commit() +# return list_of_elements, 201 
@api.route("/study//metadata/keywords/") diff --git a/apis/study_metadata/study_sponsors.py b/apis/study_metadata/study_sponsors.py deleted file mode 100644 index 1d4a66a3..00000000 --- a/apis/study_metadata/study_sponsors.py +++ /dev/null @@ -1,164 +0,0 @@ -"""API routes for study sponsors and collaborators metadata""" - -import typing - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -study_sponsors = api.model( - "StudySponsors", - { - "responsible_party_type": fields.String(required=True), - "responsible_party_investigator_first_name": fields.String(required=False), - "responsible_party_investigator_last_name": fields.String(required=True), - "responsible_party_investigator_title": fields.String(required=True), - "responsible_party_investigator_identifier_value": fields.String(required=True), - "responsible_party_investigator_identifier_scheme": fields.String( - required=True - ), - "responsible_party_investigator_identifier_scheme_uri": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_name": fields.String(required=True), - "responsible_party_investigator_affiliation_identifier_scheme": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_identifier_value": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String( - required=True - ), - "lead_sponsor_name": fields.String(required=True), - "lead_sponsor_identifier": fields.String(required=True), - "lead_sponsor_identifier_scheme": fields.String(required=True), - "lead_sponsor_identifier_scheme_uri": fields.String(required=True), - }, -) - - -@api.route("/study//metadata/sponsor") -class StudySponsorsResource(Resource): - """Study Sponsors Metadata""" - - @api.doc("sponsors") - @api.response(200, "Success") - 
@api.response(400, "Validation Error") - @api.marshal_with(study_sponsors) - def get(self, study_id: int): - """Get study sponsors metadata""" - study_ = model.Study.query.get(study_id) - - study_sponsors_ = study_.study_sponsors - - return study_sponsors_.to_dict(), 200 - - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int): - """Update study sponsors metadata""" - # Schema validation - schema = { - "type": "object", - "additionalProperties": False, - "required": [ - "responsible_party_type", - "lead_sponsor_name", - "responsible_party_investigator_last_name", - "responsible_party_investigator_first_name", - "responsible_party_investigator_title", - ], - "properties": { - "responsible_party_type": { - "type": ["string", "null"], - "enum": [ - "Sponsor", - "Principal Investigator", - "Sponsor-Investigator", - ], - }, - "responsible_party_investigator_first_name": { - "type": "string", - }, - "responsible_party_investigator_last_name": { - "type": "string", - }, - "responsible_party_investigator_title": { - "type": "string", - }, - "responsible_party_investigator_identifier_value": { - "type": "string", - }, - "responsible_party_investigator_identifier_scheme": { - "type": "string", - }, - "responsible_party_investigator_identifier_scheme_uri": { - "type": "string", - }, - "responsible_party_investigator_affiliation_name": { - "type": "string", - }, - "responsible_party_investigator_affiliation_identifier_scheme": { - "type": "string", - }, - "responsible_party_investigator_affiliation_identifier_value": { - "type": "string", - }, - "responsible_party_investigator_affiliation_identifier_scheme_uri": { - "type": "string", - }, - "lead_sponsor_name": {"type": "string"}, - "lead_sponsor_identifier": {"type": "string"}, - "lead_sponsor_identifier_scheme": {"type": "string"}, - "lead_sponsor_identifier_scheme_uri": { - "type": "string", - }, - }, - } - - try: - validate(request.json, schema) - except ValidationError as 
e: - return e.message, 400 - - data: typing.Union[dict, typing.Any] = request.json - if data["responsible_party_type"] in [ - "Principal Investigator", - "Sponsor-Investigator", - ]: - if not data["responsible_party_investigator_last_name"]: - return "Principal Investigator name is required", 400 - if not data["responsible_party_investigator_first_name"]: - return "Principal Investigator name is required", 400 - - if not data["responsible_party_investigator_title"]: - return "Principal Investigator title is required", 400 - - investigator_first_name = data["responsible_party_investigator_first_name"] - investigator_last_name = data["responsible_party_investigator_last_name"] - investigator_title = data["responsible_party_investigator_title"] - - if investigator_first_name == "": - return "Principal Investigator first name cannot be empty", 400 - if investigator_last_name == "": - return "Principal Investigator last name cannot be empty", 400 - if investigator_title == "": - return "Principal Investigator title cannot be empty", 400 - - study_ = model.Study.query.get(study_id) - - # Check user permissions - if not is_granted("study_metadata", study_): - return "Access denied, you can not modify study", 403 - - study_.study_sponsors.update(data) - - model.db.session.commit() - - return study_.study_sponsors.to_dict(), 200 diff --git a/apis/study_metadata/study_team.py b/apis/study_metadata/study_team.py new file mode 100644 index 00000000..fc3c5697 --- /dev/null +++ b/apis/study_metadata/study_team.py @@ -0,0 +1,231 @@ +"""API routes for study sponsors and collaborators metadata""" + +import typing + +from flask import request +from flask_restx import Resource, fields + +import model +from apis.study_metadata_namespace import api + +from ..authentication import is_granted + +study_sponsors = api.model( + "StudySponsors", + { + "responsible_party_type": fields.String(required=False), + "responsible_party_investigator_first_name": fields.String(required=True), + 
"responsible_party_investigator_last_name": fields.String(required=True), + "responsible_party_investigator_title": fields.String(required=True), + "responsible_party_investigator_identifier_value": fields.String(required=True), + "responsible_party_investigator_identifier_scheme": fields.String( + required=True + ), + "responsible_party_investigator_identifier_scheme_uri": fields.String( + required=True + ), + "responsible_party_investigator_affiliation_name": fields.String(required=True), + "responsible_party_investigator_affiliation_identifier_scheme": fields.String( + required=True + ), + "responsible_party_investigator_affiliation_identifier_value": fields.String( + required=True + ), + "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String( + required=True + ), + "lead_sponsor_name": fields.String(required=True), + "lead_sponsor_identifier": fields.String(required=True), + "lead_sponsor_identifier_scheme": fields.String(required=True), + "lead_sponsor_identifier_scheme_uri": fields.String(required=True), + }, +) + +study_collaborators = api.model( + "StudyCollaborators", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "created_at": fields.Integer(required=True), + }, +) + + +@api.route("/study//metadata/team") +class StudySponsorsResource(Resource): + """Study team Metadata""" + + @api.doc("sponsors") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with( + # { + # "sponsors": study_sponsors, + # "collaborators": study_collaborators + # } + # ) + def get(self, study_id: int): + """Get study team metadata""" + study_ = model.Study.query.get(study_id) + + study_sponsors_ = study_.study_sponsors + study_collaborators_ = study_.study_collaborators + # print(study_sponsors_.to_dict(),"ggg") + + return { + "sponsors": 
study_sponsors_.to_dict(), + "collaborators": [collab.to_dict() for collab in study_collaborators_], + } + 200 + + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int): + """Update study team metadata""" + # Schema validation + # schema = { + # "type": "object", + # "additionalProperties": False, + # "properties": { + # "collaborators": { + # "type": "array", + # "additionalProperties": False, + # "items": { + # "type": "object", + # "properties": { + # "id": {"type": "string"}, + # "name": {"type": "string"}, + # "identifier": {"type": "string"}, + # "identifier_scheme": {"type": "string"}, + # "identifier_scheme_uri": {"type": "string"}, + # }, + # "required": [ + # "name", + # "identifier", + # "identifier_scheme", + # ], + # }, + # }, + # "sponsors": + # { + # "type": "object", + # "additionalProperties": False, + # "properties": { + # "responsible_party_type": { + # "type": ["string", "null"], + # "enum": [ + # "Sponsor", + # "Principal Investigator", + # "Sponsor-Investigator", + # ], + # }, + # "responsible_party_investigator_first_name": { + # "type": "string", + # }, + # "responsible_party_investigator_last_name": { + # "type": "string", + # }, + # "responsible_party_investigator_title": { + # "type": "string", + # }, + # "responsible_party_investigator_identifier_value": { + # "type": "string", + # }, + # "responsible_party_investigator_identifier_scheme": { + # "type": "string", + # }, + # "responsible_party_investigator_identifier_scheme_uri": { + # "type": "string", + # }, + # "responsible_party_investigator_affiliation_name": { + # "type": "string", + # }, + # "responsible_party_investigator_affiliation_identifier_scheme": { + # "type": "string", + # }, + # "responsible_party_investigator_affiliation_identifier_value": { + # "type": "string", + # }, + # "responsible_party_investigator_affiliation_identifier_scheme_uri": { + # "type": "string", + # }, + # "lead_sponsor_name": {"type": "string"}, + # 
"lead_sponsor_identifier": {"type": "string"}, + # "lead_sponsor_identifier_scheme": {"type": "string"}, + # "lead_sponsor_identifier_scheme_uri": { + # "type": "string", + # }, + # }, + # "required": [ + # "responsible_party_type", + # "lead_sponsor_name", + # "responsible_party_investigator_last_name", + # "responsible_party_investigator_first_name", + # "responsible_party_investigator_title", + # ], + # } + # } + # } + # + # try: + # validate(request.json, schema) + # except ValidationError as e: + # return e.message, 400 + data: typing.Union[dict, typing.Any] = request.json + + if data["sponsors"]["responsible_party_type"] in [ + "Principal Investigator", + "Sponsor-Investigator", + ]: + if not data["sponsors"]["responsible_party_investigator_last_name"]: + return "Principal Investigator name is required", 400 + if not data["sponsors"]["responsible_party_investigator_first_name"]: + return "Principal Investigator name is required", 400 + + if not data["sponsors"]["responsible_party_investigator_title"]: + return "Principal Investigator title is required", 400 + + investigator_first_name = data["sponsors"][ + "responsible_party_investigator_first_name" + ] + investigator_last_name = data["sponsors"][ + "responsible_party_investigator_last_name" + ] + investigator_title = data["sponsors"][ + "responsible_party_investigator_title" + ] + + if investigator_first_name == "": + return "Principal Investigator first name cannot be empty", 400 + if investigator_last_name == "": + return "Principal Investigator last name cannot be empty", 400 + if investigator_title == "": + return "Principal Investigator title cannot be empty", 400 + + study_ = model.Study.query.get(study_id) + + # Check user permissions + if not is_granted("study_metadata", study_): + return "Access denied, you can not modify study", 403 + + list_of_elements = [] + for i in data["collaborators"]: + if "id" in i and i["id"]: + study_collaborators_ = model.StudyCollaborators.query.get(i["id"]) + 
study_collaborators_.update(i) + else: + study_collaborators_ = model.StudyCollaborators.from_data(study_, i) + model.db.session.add(study_collaborators_) + list_of_elements.append(study_collaborators_.to_dict()) + + study_.study_sponsors.update(data["sponsors"]) + + model.db.session.commit() + + return { + "collaborators": list_of_elements, + "sponsors": study_.study_sponsors.to_dict(), + }, 201 diff --git a/app.py b/app.py index 9d2db72a..0cf3a29e 100644 --- a/app.py +++ b/app.py @@ -8,7 +8,7 @@ import click import jwt -from flask import Flask, request, g +from flask import Flask, g, request from flask_bcrypt import Bcrypt from flask_cors import CORS from flask_mailman import Mail @@ -22,11 +22,7 @@ import config import model from apis import api -from apis.authentication import ( - UnauthenticatedException, - authentication, - authorization, -) +from apis.authentication import UnauthenticatedException, authentication, authorization from apis.exception import ValidationException # from pyfairdatatools import __version__ @@ -268,16 +264,19 @@ def on_after_request(resp): minutes=180 ) session = model.Session.query.get(g.token) - session_expires_at = datetime.datetime.fromtimestamp(session.expires_at, timezone.utc) + session_expires_at = datetime.datetime.fromtimestamp( + session.expires_at, timezone.utc + ) if expired_in - session_expires_at < datetime.timedelta(minutes=90): - new_token = jwt.encode( {"user": decoded["user"], "exp": expired_in, "jti": decoded["jti"]}, config.FAIRHUB_SECRET, algorithm="HS256", ) - resp.set_cookie("token", new_token, secure=True, httponly=True, samesite="None") + resp.set_cookie( + "token", new_token, secure=True, httponly=True, samesite="None" + ) session.expires_at = expired_in.timestamp() app.logger.info("after request") diff --git a/model/__init__.py b/model/__init__.py index 204f4568..ca42005b 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -1,5 +1,5 @@ -from model.dataset_metadata.dataset_contributor import 
DatasetContributor -from model.dataset_metadata.dataset_related_identifier import DatasetRelatedIdentifier +from .dataset_metadata.dataset_contributor import DatasetContributor +from .dataset_metadata.dataset_related_identifier import DatasetRelatedIdentifier from .dataset import Dataset from .dataset_metadata.dataset_access import DatasetAccess diff --git a/tests/conftest.py b/tests/conftest.py index a27e7009..646c9e80 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,6 +6,7 @@ import pytest from dotenv import load_dotenv + from app import create_app from model.db import db from pytest_config import TestConfig @@ -329,36 +330,36 @@ def clients(flask_app): ) assert response.status_code == 200 - response = _admin_client.post( + a_response = _admin_client.post( "/auth/login", json={ "email_address": "admin@fairhub.io", "password": "Testingyeshello11!", }, ) - assert response.status_code == 200 + assert a_response.status_code == 200 - response = _editor_client.post( + e_response = _editor_client.post( "/auth/login", json={ "email_address": "editor@fairhub.io", "password": "Testingyeshello11!", }, ) - assert response.status_code == 200 + assert e_response.status_code == 200 - response = _viewer_client.post( + v_response = _viewer_client.post( "/auth/login", json={ "email_address": "viewer@fairhub.io", "password": "Testingyeshello11!", }, ) - assert response.status_code == 200 + assert v_response.status_code == 200 meta = db.metadata for table in reversed(meta.sorted_tables): - if table.name == 'session': + if table.name == "session": session_entries = db.session.execute(table.select()).fetchall() assert len(session_entries) == 5 diff --git a/tests/functional/test_050_study_metadata_api.py b/tests/functional/test_050_study_metadata_api.py index 8db5675c..14fa3225 100644 --- a/tests/functional/test_050_study_metadata_api.py +++ b/tests/functional/test_050_study_metadata_api.py @@ -695,11 +695,11 @@ def test_delete_cc_metadata(clients): assert 
editor_response.status_code == 204 -# ------------------- COLLABORATORS METADATA ------------------- # -def test_post_collaborators_metadata(clients): +# ------------------- TEAM METADATA ------------------- # +def test_post_team_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/collaborators' + WHEN the '/study/{study_id}/metadata/team' endpoint is requested (POST) THEN check that the response is valid and creates the collaborators metadata """ @@ -707,170 +707,792 @@ def test_post_collaborators_metadata(clients): study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + 
"responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) # Add a one second delay to prevent duplicate timestamps sleep(1) - assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_collaborators_id = response_data[0]["id"] + pytest.global_collaborators_id = response_data["collaborators"][0]["id"] + + assert response_data["collaborators"][0]["name"] == "collaborator1123" + assert response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert response_data["collaborators"][0]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][0]["identifier_scheme_uri"] == "collaborator1123" + ) - assert response_data[0]["name"] == "collaborator1123" - assert response_data[0]["identifier"] == "collaborator1123" - assert response_data[0]["identifier_scheme"] == "collaborator1123" - assert response_data[0]["identifier_scheme_uri"] == "collaborator1123" + assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + response_data["sponsors"]["responsible_party_investigator_first_name"] == "name" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_value"] + == "identifier" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_scheme"] + == "scheme" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) + assert ( + response_data["sponsors"][ + 
"responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert response_data["sponsors"]["lead_sponsor_name"] == "name" + assert response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" admin_response = _admin_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "admin collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "admin collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "admin collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + 
"responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) # Add a one second delay to prevent duplicate timestamps sleep(1) assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) - pytest.global_admin_collaborators_id_admin = admin_response_data[0]["id"] + pytest.global_admin_collaborators_id_admin = admin_response_data["collaborators"][ + 0 + ]["id"] - assert admin_response_data[0]["name"] == "admin collaborator1123" - assert admin_response_data[0]["identifier"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme_uri"] == "collaborator1123" + assert admin_response_data["collaborators"][0]["name"] == "admin collaborator1123" + assert admin_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + admin_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + admin_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert admin_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" 
+ ] + == "uri" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert admin_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert admin_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert admin_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert ( + admin_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) editor_response = _editor_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "editor collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "editor collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "editor collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + 
"responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) assert editor_response.status_code == 201 editor_response_data = json.loads(editor_response.data) - pytest.global_editor_collaborators_id_editor = editor_response_data[0]["id"] + pytest.global_editor_collaborators_id_editor = editor_response_data[ + "collaborators" + ][0]["id"] - assert editor_response_data[0]["name"] == "editor collaborator1123" - assert editor_response_data[0]["identifier"] == "collaborator1123" - assert editor_response_data[0]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[0]["identifier_scheme_uri"] == "collaborator1123" + assert editor_response_data["collaborators"][0]["name"] == "editor collaborator1123" + assert editor_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert editor_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == 
"identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert editor_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert editor_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + ) + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "editor collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], + f"/study/{study_id}/metadata/team", + json={ + "collaborators": [ + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + 
"responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, + ) + + assert viewer_response.status_code == 403 + + +def test_get_team_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/team' endpoint is requested (GET) + THEN check that the response is valid and retrieves the collaborators metadata + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + + response = _logged_in_client.get(f"/study/{study_id}/metadata/team") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/team") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/team") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/team") + + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + assert response_data["collaborators"][0]["name"] == "collaborator1123" + assert 
response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert response_data["collaborators"][0]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][0]["identifier_scheme_uri"] == "collaborator1123" + ) + + assert admin_response_data["collaborators"][0]["name"] == "collaborator1123" + assert admin_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + admin_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + admin_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert editor_response_data["collaborators"][0]["name"] == "collaborator1123" + assert editor_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert viewer_response_data["collaborators"][0]["name"] == "collaborator1123" + assert viewer_response_data["collaborators"][0]["identifier"] == "collaborator1123" + assert ( + viewer_response_data["collaborators"][0]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + viewer_response_data["collaborators"][0]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert response_data["collaborators"][1]["name"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][1]["identifier_scheme_uri"] == "collaborator1123" + ) + + assert admin_response_data["collaborators"][1]["name"] == "collaborator1123" + assert admin_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + admin_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert 
( + admin_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert editor_response_data["collaborators"][1]["name"] == "collaborator1123" + assert editor_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert viewer_response_data["collaborators"][1]["name"] == "collaborator1123" + assert viewer_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) + + assert response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + response_data["sponsors"]["responsible_party_investigator_first_name"] == "name" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_title"] == "title" + ) # noqa: E501 + + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_value"] + == "identifier" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_identifier_scheme"] + == "scheme" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + response_data["sponsors"]["responsible_party_investigator_affiliation_name"] + == "affiliation" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + response_data["sponsors"][ + 
"responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert response_data["sponsors"]["lead_sponsor_name"] == "name" + assert response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + + assert admin_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + admin_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + admin_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert admin_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert admin_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert admin_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + assert ( + 
admin_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) + + assert editor_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + editor_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + editor_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert editor_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert editor_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + ) + assert ( + editor_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" + ) + + assert viewer_response_data["sponsors"]["responsible_party_type"] == "Sponsor" + assert ( + viewer_response_data["sponsors"]["responsible_party_investigator_first_name"] + == "name" + ) + assert ( + 
viewer_response_data["sponsors"]["responsible_party_investigator_last_name"] + == "surname" + ) + assert ( + viewer_response_data["sponsors"]["responsible_party_investigator_title"] + == "title" + ) # noqa: E501 + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_identifier_value" + ] + == "identifier" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme" + ] + == "scheme" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_identifier_scheme_uri" + ] + == "uri" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_name" + ] + == "affiliation" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_value" + ] + == "identifier" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme" + ] + == "scheme" + ) + assert ( + viewer_response_data["sponsors"][ + "responsible_party_investigator_affiliation_identifier_scheme_uri" + ] + == "uri" + ) + assert viewer_response_data["sponsors"]["lead_sponsor_name"] == "name" + assert viewer_response_data["sponsors"]["lead_sponsor_identifier"] == "identifier" + assert ( + viewer_response_data["sponsors"]["lead_sponsor_identifier_scheme"] == "scheme" + ) + assert ( + viewer_response_data["sponsors"]["lead_sponsor_identifier_scheme_uri"] == "uri" ) - assert viewer_response.status_code == 403 + assert response_data["collaborators"][1]["name"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert response_data["collaborators"][1]["identifier_scheme"] == "collaborator1123" + assert ( + response_data["collaborators"][1]["identifier_scheme_uri"] == "collaborator1123" + ) + assert admin_response_data["collaborators"][1]["name"] == "collaborator1123" + assert admin_response_data["collaborators"][1]["identifier"] == 
"collaborator1123" + assert ( + admin_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + admin_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) -def test_get_collaborators_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/collaborators' endpoint is requested (GET) - THEN check that the response is valid and retrieves the collaborators metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore + assert editor_response_data["collaborators"][1]["name"] == "collaborator1123" + assert editor_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + editor_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + editor_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) - response = _logged_in_client.get(f"/study/{study_id}/metadata/collaborators") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/collaborators") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/collaborators") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/collaborators") + assert viewer_response_data["collaborators"][1]["name"] == "collaborator1123" + assert viewer_response_data["collaborators"][1]["identifier"] == "collaborator1123" + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme"] + == "collaborator1123" + ) + assert ( + viewer_response_data["collaborators"][1]["identifier_scheme_uri"] + == "collaborator1123" + ) - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 + # assert response_data[2]["name"] == "editor collaborator1123" + # assert 
response_data[2]["identifier"] == "collaborator1123" + # assert response_data[2]["identifier_scheme"] == "collaborator1123" + # assert response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # + # assert admin_response_data[2]["name"] == "editor collaborator1123" + # assert admin_response_data[2]["identifier"] == "collaborator1123" + # assert admin_response_data[2]["identifier_scheme"] == "collaborator1123" + # assert admin_response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # + # assert editor_response_data[2]["name"] == "editor collaborator1123" + # assert editor_response_data[2]["identifier"] == "collaborator1123" + # assert editor_response_data[2]["identifier_scheme"] == "collaborator1123" + # assert editor_response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # + # assert viewer_response_data[2]["name"] == "editor collaborator1123" + # assert viewer_response_data[2]["identifier"] == "collaborator1123" + # assert viewer_response_data[2]["identifier_scheme"] == "collaborator1123" + # assert viewer_response_data[2]["identifier_scheme_uri"] == "collaborator1123" + # - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["name"] == "collaborator1123" - assert response_data[0]["identifier"] == "collaborator1123" - assert response_data[0]["identifier_scheme"] == "collaborator1123" - assert response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert admin_response_data[0]["name"] == "collaborator1123" - assert admin_response_data[0]["identifier"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert editor_response_data[0]["name"] == "collaborator1123" - assert editor_response_data[0]["identifier"] == 
"collaborator1123" - assert editor_response_data[0]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert viewer_response_data[0]["name"] == "collaborator1123" - assert viewer_response_data[0]["identifier"] == "collaborator1123" - assert viewer_response_data[0]["identifier_scheme"] == "collaborator1123" - assert viewer_response_data[0]["identifier_scheme_uri"] == "collaborator1123" - - assert response_data[1]["name"] == "admin collaborator1123" - assert response_data[1]["identifier"] == "collaborator1123" - assert response_data[1]["identifier_scheme"] == "collaborator1123" - assert response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert admin_response_data[1]["name"] == "admin collaborator1123" - assert admin_response_data[1]["identifier"] == "collaborator1123" - assert admin_response_data[1]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert editor_response_data[1]["name"] == "admin collaborator1123" - assert editor_response_data[1]["identifier"] == "collaborator1123" - assert editor_response_data[1]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert viewer_response_data[1]["name"] == "admin collaborator1123" - assert viewer_response_data[1]["identifier"] == "collaborator1123" - assert viewer_response_data[1]["identifier_scheme"] == "collaborator1123" - assert viewer_response_data[1]["identifier_scheme_uri"] == "collaborator1123" - - assert response_data[2]["name"] == "editor collaborator1123" - assert response_data[2]["identifier"] == "collaborator1123" - assert response_data[2]["identifier_scheme"] == "collaborator1123" - assert response_data[2]["identifier_scheme_uri"] == "collaborator1123" - - assert admin_response_data[2]["name"] == "editor collaborator1123" - assert 
admin_response_data[2]["identifier"] == "collaborator1123" - assert admin_response_data[2]["identifier_scheme"] == "collaborator1123" - assert admin_response_data[2]["identifier_scheme_uri"] == "collaborator1123" - - assert editor_response_data[2]["name"] == "editor collaborator1123" - assert editor_response_data[2]["identifier"] == "collaborator1123" - assert editor_response_data[2]["identifier_scheme"] == "collaborator1123" - assert editor_response_data[2]["identifier_scheme_uri"] == "collaborator1123" - - assert viewer_response_data[2]["name"] == "editor collaborator1123" - assert viewer_response_data[2]["identifier"] == "collaborator1123" - assert viewer_response_data[2]["identifier_scheme"] == "collaborator1123" - assert viewer_response_data[2]["identifier_scheme_uri"] == "collaborator1123" +# ------------------- COLLABORATORS DELETE METADATA ------------------- # def test_delete_collaborators_metadata(clients): @@ -904,114 +1526,359 @@ def test_delete_collaborators_metadata(clients): assert editor_response.status_code == 204 -# # ------------------- CONDITIONS METADATA ------------------- # -def test_post_conditions_metadata(clients): +# ------------------- DESCRIPTION METADATA ------------------- # +def test_post_description_metadata(clients): """ GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (POST) - THEN check that the response is valid and creates the conditions metadata + WHEN the '/study/{study_id}/metadata/description' endpoint is requested (POST) + THEN check that the response is valid and creates the description metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore response = _logged_in_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme 
uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) - assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_conditions_id = response_data[0]["id"] + pytest.global_identification_id = response_data["identification"]["secondary"][0][ + "id" + ] + + pytest.global_keywords_id = response_data["keywords"][0]["id"] + pytest.global_conditions_id = response_data["conditions"][0]["id"] - assert response_data[0]["name"] == "condition" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["condition_uri"] == "condition" + assert response_data["conditions"][0]["name"] == "condition" + assert ( + response_data["conditions"][0]["classification_code"] == "classification code" + ) + assert response_data["conditions"][0]["scheme"] == "scheme" + assert response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert response_data["conditions"][0]["condition_uri"] == "condition" + + assert response_data["keywords"][0]["name"] == "keywords" + assert 
response_data["keywords"][0]["classification_code"] == "classification code" + assert response_data["keywords"][0]["scheme"] == "scheme" + assert response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert response_data["keywords"][0]["keyword_uri"] == "keywords" + assert response_data["identification"]["primary"]["identifier"] == "first" + assert response_data["identification"]["primary"]["identifier_type"] == "test" + assert response_data["identification"]["primary"]["identifier_domain"] == "domain" + assert response_data["identification"]["primary"]["identifier_link"] == "link" + assert response_data["identification"]["secondary"][0]["identifier"] == "test" + assert response_data["identification"]["secondary"][0]["identifier_type"] == "test" + assert ( + response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert response_data["identification"]["secondary"][0]["identifier_link"] == "link" + + assert response_data["description"]["brief_summary"] == "brief_summary" + assert ( + response_data["description"]["detailed_description"] == "detailed_description" + ) admin_response = _admin_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "admin condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + 
"identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) - pytest.global_admin_conditions_id_admin = admin_response_data[0]["id"] + pytest.global_identification_id_admin = admin_response_data["identification"][ + "secondary" + ][1]["id"] + pytest.global_admin_keywords_id_admin = admin_response_data["keywords"][0]["id"] + pytest.global_admin_conditions_id_admin = admin_response_data["conditions"][0]["id"] + + assert admin_response_data["conditions"][0]["name"] == "condition" + assert ( + admin_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert admin_response_data["conditions"][0]["scheme"] == "scheme" + assert admin_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["conditions"][0]["condition_uri"] == "condition" + + assert admin_response_data["keywords"][0]["name"] == "keywords" + assert ( + admin_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert admin_response_data["keywords"][0]["scheme"] == "scheme" + assert admin_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["keywords"][0]["keyword_uri"] == "keywords" + assert admin_response_data["identification"]["primary"]["identifier"] == "first" + assert admin_response_data["identification"]["primary"]["identifier_type"] == "test" + assert ( + admin_response_data["identification"]["primary"]["identifier_domain"] + == "domain" + ) + assert admin_response_data["identification"]["primary"]["identifier_link"] == "link" + assert admin_response_data["identification"]["secondary"][0]["identifier"] == "test" + assert ( + 
admin_response_data["identification"]["secondary"][0]["identifier_type"] + == "test" + ) + assert ( + admin_response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert ( + admin_response_data["identification"]["secondary"][0]["identifier_link"] + == "link" + ) - assert admin_response_data[0]["name"] == "admin condition" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["condition_uri"] == "condition" + assert admin_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + admin_response_data["description"]["detailed_description"] + == "detailed_description" + ) editor_response = _editor_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "editor condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) assert editor_response.status_code == 201 editor_response_data = 
json.loads(editor_response.data) - pytest.global_editor_conditions_id_editor = editor_response_data[0]["id"] - assert editor_response_data[0]["name"] == "editor condition" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["condition_uri"] == "condition" + pytest.global_identification_id_editor = editor_response_data["identification"][ + "secondary" + ][2]["id"] + pytest.global_editor_keywords_id_editor = editor_response_data["keywords"][0]["id"] + pytest.global_editor_conditions_id_editor = editor_response_data["conditions"][0][ + "id" + ] + + assert editor_response_data["conditions"][0]["name"] == "condition" + assert ( + editor_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["conditions"][0]["scheme"] == "scheme" + assert editor_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["conditions"][0]["condition_uri"] == "condition" + + assert editor_response_data["keywords"][0]["name"] == "keywords" + assert ( + editor_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["keywords"][0]["scheme"] == "scheme" + assert editor_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["keywords"][0]["keyword_uri"] == "keywords" + assert editor_response_data["identification"]["primary"]["identifier"] == "first" + assert ( + editor_response_data["identification"]["primary"]["identifier_type"] == "test" + ) + assert ( + editor_response_data["identification"]["primary"]["identifier_domain"] + == "domain" + ) + assert ( + editor_response_data["identification"]["primary"]["identifier_link"] == "link" + ) + assert ( + editor_response_data["identification"]["secondary"][0]["identifier"] == "test" + ) + 
assert ( + editor_response_data["identification"]["secondary"][0]["identifier_type"] + == "test" + ) + assert ( + editor_response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert ( + editor_response_data["identification"]["secondary"][0]["identifier_link"] + == "link" + ) + + assert editor_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + editor_response_data["description"]["detailed_description"] + == "detailed_description" + ) viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "editor condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], + f"/study/{study_id}/metadata/description", + json={ + "conditions": [ + { + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", + } + ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, + }, ) assert viewer_response.status_code == 403 -def test_get_conditions_metadata(clients): +def test_get_description_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) - THEN check that the response is valid and retrieves the conditions metadata + WHEN the 
'/study/{study_id}/metadata/description' endpoint is requested (GET) + THEN check that the response is valid and retrieves the description metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - response = _logged_in_client.get(f"/study/{study_id}/metadata/conditions") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/conditions") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/conditions") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/conditions") + response = _logged_in_client.get(f"/study/{study_id}/metadata/description") + admin_response = _admin_client.get(f"/study/{study_id}/metadata/description") + editor_response = _editor_client.get(f"/study/{study_id}/metadata/description") + viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/description") assert response.status_code == 200 assert admin_response.status_code == 200 @@ -1023,323 +1890,139 @@ def test_get_conditions_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["name"] == "condition" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["condition_uri"] == "condition" - - assert admin_response_data[0]["name"] == "condition" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["condition_uri"] == "condition" - - assert editor_response_data[0]["name"] == "condition" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert 
editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["condition_uri"] == "condition" - - assert viewer_response_data[0]["name"] == "condition" - assert viewer_response_data[0]["classification_code"] == "classification code" - assert viewer_response_data[0]["scheme"] == "scheme" - assert viewer_response_data[0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[0]["condition_uri"] == "condition" - - assert response_data[1]["name"] == "admin condition" - assert response_data[1]["classification_code"] == "classification code" - assert response_data[1]["scheme"] == "scheme" - assert response_data[1]["scheme_uri"] == "scheme uri" - assert response_data[1]["condition_uri"] == "condition" - - assert admin_response_data[1]["name"] == "admin condition" - assert admin_response_data[1]["classification_code"] == "classification code" - assert admin_response_data[1]["scheme"] == "scheme" - assert admin_response_data[1]["scheme_uri"] == "scheme uri" - assert admin_response_data[1]["condition_uri"] == "condition" - - assert editor_response_data[1]["name"] == "admin condition" - assert editor_response_data[1]["classification_code"] == "classification code" - assert editor_response_data[1]["scheme"] == "scheme" - assert editor_response_data[1]["scheme_uri"] == "scheme uri" - assert editor_response_data[1]["condition_uri"] == "condition" - - assert viewer_response_data[1]["name"] == "admin condition" - assert viewer_response_data[1]["classification_code"] == "classification code" - assert viewer_response_data[1]["scheme"] == "scheme" - assert viewer_response_data[1]["scheme_uri"] == "scheme uri" - assert viewer_response_data[1]["condition_uri"] == "condition" - - assert response_data[2]["name"] == "editor condition" - assert response_data[2]["classification_code"] == "classification code" - assert response_data[2]["scheme"] == "scheme" - assert response_data[2]["scheme_uri"] == "scheme uri" - assert response_data[2]["condition_uri"] == 
"condition" - - assert admin_response_data[2]["name"] == "editor condition" - assert admin_response_data[2]["classification_code"] == "classification code" - assert admin_response_data[2]["scheme"] == "scheme" - assert admin_response_data[2]["scheme_uri"] == "scheme uri" - assert admin_response_data[2]["condition_uri"] == "condition" - - assert editor_response_data[2]["name"] == "editor condition" - assert editor_response_data[2]["classification_code"] == "classification code" - assert editor_response_data[2]["scheme"] == "scheme" - assert editor_response_data[2]["scheme_uri"] == "scheme uri" - assert editor_response_data[2]["condition_uri"] == "condition" - - assert viewer_response_data[2]["name"] == "editor condition" - assert viewer_response_data[2]["classification_code"] == "classification code" - assert viewer_response_data[2]["scheme"] == "scheme" - assert viewer_response_data[2]["scheme_uri"] == "scheme uri" - assert viewer_response_data[2]["condition_uri"] == "condition" - - -def test_delete_conditions_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) - THEN check that the response is valid and retrieves the identification metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - conditions_id = pytest.global_conditions_id - admin_conditions_id = pytest.global_admin_conditions_id_admin - editor_conditions_id = pytest.global_editor_conditions_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/conditions/{conditions_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/metadata/conditions/{conditions_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/conditions/{admin_conditions_id}" - ) - editor_response = _editor_client.delete( - 
f"/study/{study_id}/metadata/conditions/{editor_conditions_id}" + assert response_data["description"]["brief_summary"] == "brief_summary" + assert ( + response_data["description"]["detailed_description"] == "detailed_description" ) - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- KEYWORDS METADATA ------------------- # -def test_post_keywords_metadata(clients): - """ - GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (POST) - THEN check that the response is valid and creates the keywords metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert admin_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + admin_response_data["description"]["detailed_description"] + == "detailed_description" ) - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_keywords_id = response_data[0]["id"] - assert response_data[0]["name"] == "keywords" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["keyword_uri"] == "keywords" - - admin_response = _admin_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "admin keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert 
editor_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + editor_response_data["description"]["detailed_description"] + == "detailed_description" ) - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_admin_keywords_id_admin = admin_response_data[0]["id"] - assert admin_response_data[0]["name"] == "admin keywords" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["keyword_uri"] == "keywords" - - editor_response = _editor_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "editor keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert viewer_response_data["description"]["brief_summary"] == "brief_summary" + assert ( + viewer_response_data["description"]["detailed_description"] + == "detailed_description" ) - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_editor_keywords_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["name"] == "editor keywords" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["keyword_uri"] == "keywords" + assert response_data["conditions"][0]["name"] == "condition" + assert ( + response_data["conditions"][0]["classification_code"] == "classification code" + ) + assert response_data["conditions"][0]["scheme"] == "scheme" + assert response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert response_data["conditions"][0]["condition_uri"] == "condition" - viewer_response = 
_viewer_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "editor keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + assert admin_response_data["conditions"][0]["name"] == "condition" + assert ( + admin_response_data["conditions"][0]["classification_code"] + == "classification code" ) + assert admin_response_data["conditions"][0]["scheme"] == "scheme" + assert admin_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["conditions"][0]["condition_uri"] == "condition" - assert viewer_response.status_code == 403 + assert editor_response_data["conditions"][0]["name"] == "condition" + assert ( + editor_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["conditions"][0]["scheme"] == "scheme" + assert editor_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["conditions"][0]["condition_uri"] == "condition" + assert viewer_response_data["conditions"][0]["name"] == "condition" + assert ( + viewer_response_data["conditions"][0]["classification_code"] + == "classification code" + ) + assert viewer_response_data["conditions"][0]["scheme"] == "scheme" + assert viewer_response_data["conditions"][0]["scheme_uri"] == "scheme uri" + assert viewer_response_data["conditions"][0]["condition_uri"] == "condition" -def test_get_keywords_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (GET) - THEN check that the response is valid and retrieves the keywords metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore + assert response_data["keywords"][0]["name"] == "keywords" + assert 
response_data["keywords"][0]["classification_code"] == "classification code" + assert response_data["keywords"][0]["scheme"] == "scheme" + assert response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert response_data["keywords"][0]["keyword_uri"] == "keywords" - response = _logged_in_client.get(f"/study/{study_id}/metadata/keywords") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/keywords") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/keywords") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/keywords") + assert admin_response_data["keywords"][0]["name"] == "keywords" + assert ( + admin_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert admin_response_data["keywords"][0]["scheme"] == "scheme" + assert admin_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert admin_response_data["keywords"][0]["keyword_uri"] == "keywords" - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 + assert editor_response_data["keywords"][0]["name"] == "keywords" + assert ( + editor_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert editor_response_data["keywords"][0]["scheme"] == "scheme" + assert editor_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert editor_response_data["keywords"][0]["keyword_uri"] == "keywords" - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) + assert viewer_response_data["keywords"][0]["name"] == "keywords" + assert ( + viewer_response_data["keywords"][0]["classification_code"] + == "classification code" + ) + assert viewer_response_data["keywords"][0]["scheme"] == "scheme" + assert 
viewer_response_data["keywords"][0]["scheme_uri"] == "scheme uri" + assert viewer_response_data["keywords"][0]["keyword_uri"] == "keywords" - assert response_data[0]["name"] == "keywords" - assert response_data[0]["classification_code"] == "classification code" - assert response_data[0]["scheme"] == "scheme" - assert response_data[0]["scheme_uri"] == "scheme uri" - assert response_data[0]["keyword_uri"] == "keywords" - - assert admin_response_data[0]["name"] == "keywords" - assert admin_response_data[0]["classification_code"] == "classification code" - assert admin_response_data[0]["scheme"] == "scheme" - assert admin_response_data[0]["scheme_uri"] == "scheme uri" - assert admin_response_data[0]["keyword_uri"] == "keywords" - - assert editor_response_data[0]["name"] == "keywords" - assert editor_response_data[0]["classification_code"] == "classification code" - assert editor_response_data[0]["scheme"] == "scheme" - assert editor_response_data[0]["scheme_uri"] == "scheme uri" - assert editor_response_data[0]["keyword_uri"] == "keywords" - - assert viewer_response_data[0]["name"] == "keywords" - assert viewer_response_data[0]["classification_code"] == "classification code" - assert viewer_response_data[0]["scheme"] == "scheme" - assert viewer_response_data[0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[0]["keyword_uri"] == "keywords" - - assert response_data[1]["name"] == "admin keywords" - assert response_data[1]["classification_code"] == "classification code" - assert response_data[1]["scheme"] == "scheme" - assert response_data[1]["scheme_uri"] == "scheme uri" - assert response_data[1]["keyword_uri"] == "keywords" - - assert admin_response_data[1]["name"] == "admin keywords" - assert admin_response_data[1]["classification_code"] == "classification code" - assert admin_response_data[1]["scheme"] == "scheme" - assert admin_response_data[1]["scheme_uri"] == "scheme uri" - assert admin_response_data[1]["keyword_uri"] == "keywords" - - assert 
editor_response_data[1]["name"] == "admin keywords" - assert editor_response_data[1]["classification_code"] == "classification code" - assert editor_response_data[1]["scheme"] == "scheme" - assert editor_response_data[1]["scheme_uri"] == "scheme uri" - assert editor_response_data[1]["keyword_uri"] == "keywords" - - assert viewer_response_data[1]["name"] == "admin keywords" - assert viewer_response_data[1]["classification_code"] == "classification code" - assert viewer_response_data[1]["scheme"] == "scheme" - assert viewer_response_data[1]["scheme_uri"] == "scheme uri" - assert viewer_response_data[1]["keyword_uri"] == "keywords" - - assert response_data[2]["name"] == "editor keywords" - assert response_data[2]["classification_code"] == "classification code" - assert response_data[2]["scheme"] == "scheme" - assert response_data[2]["scheme_uri"] == "scheme uri" - assert response_data[2]["keyword_uri"] == "keywords" - - assert admin_response_data[2]["name"] == "editor keywords" - assert admin_response_data[2]["classification_code"] == "classification code" - assert admin_response_data[2]["scheme"] == "scheme" - assert admin_response_data[2]["scheme_uri"] == "scheme uri" - assert admin_response_data[2]["keyword_uri"] == "keywords" - - assert editor_response_data[2]["name"] == "editor keywords" - assert editor_response_data[2]["classification_code"] == "classification code" - assert editor_response_data[2]["scheme"] == "scheme" - assert editor_response_data[2]["scheme_uri"] == "scheme uri" - assert editor_response_data[2]["keyword_uri"] == "keywords" - - assert viewer_response_data[2]["name"] == "editor keywords" - assert viewer_response_data[2]["classification_code"] == "classification code" - assert viewer_response_data[2]["scheme"] == "scheme" - assert viewer_response_data[2]["scheme_uri"] == "scheme uri" - assert viewer_response_data[2]["keyword_uri"] == "keywords" + assert response_data["identification"]["primary"]["identifier"] == "first" + assert 
response_data["identification"]["primary"]["identifier_type"] == "test" + assert response_data["identification"]["primary"]["identifier_domain"] == "domain" + assert response_data["identification"]["primary"]["identifier_link"] == "link" + assert response_data["identification"]["secondary"][0]["identifier"] == "test" + assert response_data["identification"]["secondary"][0]["identifier_type"] == "test" + assert ( + response_data["identification"]["secondary"][0]["identifier_domain"] + == "dodfasdfmain" + ) + assert response_data["identification"]["secondary"][0]["identifier_link"] == "link" + assert response_data["identification"]["secondary"][1]["identifier"] == "test" + assert response_data["identification"]["secondary"][1]["identifier_type"] == "test" -def test_delete_keywords_metadata(clients): +# ------------------- IDENTIFICATION METADATA ------------------- # +def test_delete_identification_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (GET) + WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) THEN check that the response is valid and retrieves the identification metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore - keywords_id = pytest.global_keywords_id - admin_keywords_id = pytest.global_admin_keywords_id_admin - editor_keywords_id = pytest.global_editor_keywords_id_editor + identification_id = pytest.global_identification_id + admin_identification_id = pytest.global_identification_id_admin + editor_identification_id = pytest.global_identification_id_editor viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/keywords/{keywords_id}" + f"/study/{study_id}/metadata/identification/{identification_id}" ) + response = _logged_in_client.delete( - f"/study/{study_id}/metadata/keywords/{keywords_id}" + 
f"/study/{study_id}/metadata/identification/{identification_id}" ) + admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/keywords/{admin_keywords_id}" + f"/study/{study_id}/metadata/identification/{admin_identification_id}" ) + editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/keywords/{editor_keywords_id}" + f"/study/{study_id}/metadata/identification/{editor_identification_id}" ) assert viewer_response.status_code == 403 @@ -1348,104 +2031,68 @@ def test_delete_keywords_metadata(clients): assert editor_response.status_code == 204 -# ------------------- DESCRIPTION METADATA ------------------- # -def test_put_description_metadata(clients): +# ------------------- CONDITIONS METADATA ------------------- # +def test_delete_conditions_metadata(clients): """ - GIVEN a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/description' endpoint is requested (POST) - THEN check that the response is valid and creates the description metadata + Given a Flask application configured for testing and a study ID + WHEN the '/study/{study_id}/metadata/conditions' endpoint is requested (GET) + THEN check that the response is valid and retrieves the identification metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore + conditions_id = pytest.global_conditions_id + admin_conditions_id = pytest.global_admin_conditions_id_admin + editor_conditions_id = pytest.global_editor_conditions_id_editor - response = _logged_in_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "brief_summary", - "detailed_description": "detailed_description", - }, + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/conditions/{conditions_id}" ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["brief_summary"] == "brief_summary" - 
assert response_data["detailed_description"] == "detailed_description" - - admin_response = _admin_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "admin-brief_summary", - "detailed_description": "admin-detailed_description", - }, + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/conditions/{conditions_id}" ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["brief_summary"] == "admin-brief_summary" - assert admin_response_data["detailed_description"] == "admin-detailed_description" - - editor_response = _editor_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "editor-brief_summary", - "detailed_description": "editor-detailed_description", - }, + admin_response = _admin_client.delete( + f"/study/{study_id}/metadata/conditions/{admin_conditions_id}" ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["brief_summary"] == "editor-brief_summary" - assert editor_response_data["detailed_description"] == "editor-detailed_description" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/metadata/description", - json={ - "brief_summary": "viewer-brief_summary", - "detailed_description": "viewer-detailed_description", - }, + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/conditions/{editor_conditions_id}" ) assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_get_description_metadata(clients): +# ------------------- KEYWORDS METADATA ------------------- # +def test_delete_keywords_metadata(clients): """ Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/description' endpoint is requested (GET) - THEN check that 
the response is valid and retrieves the description metadata + WHEN the '/study/{study_id}/metadata/keywords' endpoint is requested (GET) + THEN check that the response is valid and retrieves the identification metadata """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore + keywords_id = pytest.global_keywords_id + admin_keywords_id = pytest.global_admin_keywords_id_admin + editor_keywords_id = pytest.global_editor_keywords_id_editor - response = _logged_in_client.get(f"/study/{study_id}/metadata/description") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/description") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/description") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/description") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["brief_summary"] == "editor-brief_summary" - assert response_data["detailed_description"] == "editor-detailed_description" - - assert admin_response_data["brief_summary"] == "editor-brief_summary" - assert admin_response_data["detailed_description"] == "editor-detailed_description" - - assert editor_response_data["brief_summary"] == "editor-brief_summary" - assert editor_response_data["detailed_description"] == "editor-detailed_description" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/metadata/keywords/{keywords_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/metadata/keywords/{keywords_id}" + ) + admin_response = _admin_client.delete( + 
f"/study/{study_id}/metadata/keywords/{admin_keywords_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/metadata/keywords/{editor_keywords_id}" + ) - assert viewer_response_data["brief_summary"] == "editor-brief_summary" - assert viewer_response_data["detailed_description"] == "editor-detailed_description" + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 # ------------------- DESIGN METADATA ------------------- # @@ -1960,274 +2607,14 @@ def test_get_eligibility_metadata(clients): assert viewer_response_data["gender_based"] == "Yes" assert viewer_response_data["gender_description"] == "editor-none" assert viewer_response_data["minimum_age_value"] == 18 - assert viewer_response_data["maximum_age_value"] == 61 - assert viewer_response_data["minimum_age_unit"] == "1" - assert viewer_response_data["maximum_age_unit"] == "2" - assert viewer_response_data["healthy_volunteers"] == "Yes" - assert viewer_response_data["inclusion_criteria"] == ["tests"] - assert viewer_response_data["exclusion_criteria"] == ["Probability Sample"] - assert viewer_response_data["study_population"] == "study_population" - assert viewer_response_data["sampling_method"] == "Probability Sample" - - -# ------------------- IDENTIFICATION METADATA ------------------- # -def test_post_identification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (POST) - THEN check that the response is valid and creates the identification metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "first", - "identifier_type": "test", - 
"identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_identification_id = response_data["secondary"][0]["id"] - - assert response_data["primary"]["identifier"] == "first" - assert response_data["primary"]["identifier_type"] == "test" - assert response_data["primary"]["identifier_domain"] == "domain" - assert response_data["primary"]["identifier_link"] == "link" - assert response_data["secondary"][0]["identifier"] == "test" - assert response_data["secondary"][0]["identifier_type"] == "test" - assert response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert response_data["secondary"][0]["identifier_link"] == "link" - - admin_response = _admin_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "admin-first", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_identification_id_admin = admin_response_data["secondary"][1]["id"] - - assert admin_response_data["primary"]["identifier"] == "admin-first" - assert admin_response_data["primary"]["identifier_type"] == "test" - assert admin_response_data["primary"]["identifier_domain"] == "domain" - assert admin_response_data["primary"]["identifier_link"] == "link" - assert admin_response_data["secondary"][1]["identifier"] == "test" - assert 
admin_response_data["secondary"][1]["identifier_type"] == "test" - assert admin_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][1]["identifier_link"] == "link" - - editor_response = _editor_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "editor-first", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_identification_id_editor = editor_response_data["secondary"][2]["id"] - - assert editor_response_data["primary"]["identifier"] == "editor-first" - assert editor_response_data["primary"]["identifier_type"] == "test" - assert editor_response_data["primary"]["identifier_domain"] == "domain" - assert editor_response_data["primary"]["identifier_link"] == "link" - assert editor_response_data["secondary"][2]["identifier"] == "test" - assert editor_response_data["secondary"][2]["identifier_type"] == "test" - assert editor_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][2]["identifier_link"] == "link" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/metadata/identification", - json={ - "primary": { - "identifier": "viewer-first", - "identifier_type": "test", - "identifier_domain": "domain", - "identifier_link": "link", - }, - "secondary": [ - { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", - } - ], - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_identification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the 
'/study/{study_id}/metadata/identification' endpoint is requested (GET) - THEN check that the response is valid and retrieves the identification metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/identification") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/identification") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/identification") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/identification") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["primary"]["identifier"] == "editor-first" - assert response_data["primary"]["identifier_type"] == "test" - assert response_data["primary"]["identifier_domain"] == "domain" - assert response_data["primary"]["identifier_link"] == "link" - assert response_data["secondary"][0]["identifier"] == "test" - assert response_data["secondary"][0]["identifier_type"] == "test" - assert response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert response_data["secondary"][0]["identifier_link"] == "link" - assert response_data["secondary"][1]["identifier"] == "test" - assert response_data["secondary"][1]["identifier_type"] == "test" - assert response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert response_data["secondary"][1]["identifier_link"] == "link" - assert response_data["secondary"][2]["identifier"] == "test" - assert response_data["secondary"][2]["identifier_type"] == "test" - assert 
response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert response_data["secondary"][2]["identifier_link"] == "link" - - assert admin_response_data["primary"]["identifier"] == "editor-first" - assert admin_response_data["primary"]["identifier_type"] == "test" - assert admin_response_data["primary"]["identifier_domain"] == "domain" - assert admin_response_data["primary"]["identifier_link"] == "link" - assert admin_response_data["secondary"][0]["identifier"] == "test" - assert admin_response_data["secondary"][0]["identifier_type"] == "test" - assert admin_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][0]["identifier_link"] == "link" - assert admin_response_data["secondary"][1]["identifier"] == "test" - assert admin_response_data["secondary"][1]["identifier_type"] == "test" - assert admin_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][1]["identifier_link"] == "link" - assert admin_response_data["secondary"][2]["identifier"] == "test" - assert admin_response_data["secondary"][2]["identifier_type"] == "test" - assert admin_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert admin_response_data["secondary"][2]["identifier_link"] == "link" - - assert editor_response_data["primary"]["identifier"] == "editor-first" - assert editor_response_data["primary"]["identifier_type"] == "test" - assert editor_response_data["primary"]["identifier_domain"] == "domain" - assert editor_response_data["primary"]["identifier_link"] == "link" - assert editor_response_data["secondary"][0]["identifier"] == "test" - assert editor_response_data["secondary"][0]["identifier_type"] == "test" - assert editor_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][0]["identifier_link"] == "link" - assert editor_response_data["secondary"][1]["identifier"] == "test" - assert 
editor_response_data["secondary"][1]["identifier_type"] == "test" - assert editor_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][1]["identifier_link"] == "link" - assert editor_response_data["secondary"][2]["identifier"] == "test" - assert editor_response_data["secondary"][2]["identifier_type"] == "test" - assert editor_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert editor_response_data["secondary"][2]["identifier_link"] == "link" - - assert viewer_response_data["primary"]["identifier"] == "editor-first" - assert viewer_response_data["primary"]["identifier_type"] == "test" - assert viewer_response_data["primary"]["identifier_domain"] == "domain" - assert viewer_response_data["primary"]["identifier_link"] == "link" - assert viewer_response_data["secondary"][0]["identifier"] == "test" - assert viewer_response_data["secondary"][0]["identifier_type"] == "test" - assert viewer_response_data["secondary"][0]["identifier_domain"] == "dodfasdfmain" - assert viewer_response_data["secondary"][0]["identifier_link"] == "link" - assert viewer_response_data["secondary"][1]["identifier"] == "test" - assert viewer_response_data["secondary"][1]["identifier_type"] == "test" - assert viewer_response_data["secondary"][1]["identifier_domain"] == "dodfasdfmain" - assert viewer_response_data["secondary"][1]["identifier_link"] == "link" - assert viewer_response_data["secondary"][2]["identifier"] == "test" - assert viewer_response_data["secondary"][2]["identifier_type"] == "test" - assert viewer_response_data["secondary"][2]["identifier_domain"] == "dodfasdfmain" - assert viewer_response_data["secondary"][2]["identifier_link"] == "link" - - -def test_delete_identification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/identification' endpoint is requested (GET) - THEN check that the response is valid and retrieves 
the identification metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - identification_id = pytest.global_identification_id - admin_identification_id = pytest.global_identification_id_admin - editor_identification_id = pytest.global_identification_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/metadata/identification/{identification_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/metadata/identification/{identification_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/metadata/identification/{admin_identification_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/metadata/identification/{editor_identification_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + assert viewer_response_data["maximum_age_value"] == 61 + assert viewer_response_data["minimum_age_unit"] == "1" + assert viewer_response_data["maximum_age_unit"] == "2" + assert viewer_response_data["healthy_volunteers"] == "Yes" + assert viewer_response_data["inclusion_criteria"] == ["tests"] + assert viewer_response_data["exclusion_criteria"] == ["Probability Sample"] + assert viewer_response_data["study_population"] == "study_population" + assert viewer_response_data["sampling_method"] == "Probability Sample" # ------------------- INTERVENTION METADATA ------------------- # @@ -3146,434 +3533,6 @@ def test_get_oversight_metadata(clients): assert viewer_response_data["human_subject_review_status"] == "yes" -# ------------------- SPONSORS METADATA ------------------- # -def test_put_sponsors_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (PUT) - THEN check that the response is 
valid and updates the sponsors metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": "surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": "scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - "responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - "lead_sponsor_identifier": "identifier", - }, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["responsible_party_type"] == "Sponsor" - assert response_data["responsible_party_investigator_first_name"] == "name" - assert response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - response_data["responsible_party_investigator_identifier_value"] == "identifier" - ) - assert response_data["responsible_party_investigator_identifier_scheme"] == "scheme" - assert ( - response_data["responsible_party_investigator_identifier_scheme_uri"] == "uri" - ) - assert ( - response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - 
response_data["responsible_party_investigator_affiliation_identifier_value"] - == "identifier" - ) - assert ( - response_data["responsible_party_investigator_affiliation_identifier_scheme"] - == "scheme" - ) - assert ( - response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert response_data["lead_sponsor_name"] == "name" - assert response_data["lead_sponsor_identifier"] == "identifier" - assert response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - admin_response = _admin_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": "surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": "scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - "responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - "lead_sponsor_identifier": "identifier", - }, - ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["responsible_party_type"] == "Sponsor" - assert admin_response_data["responsible_party_investigator_first_name"] == "name" - assert admin_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - admin_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( 
- admin_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - admin_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert admin_response_data["lead_sponsor_name"] == "name" - assert admin_response_data["lead_sponsor_identifier"] == "identifier" - assert admin_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert admin_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - editor_response = _editor_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": "surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": "scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - "responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - 
"lead_sponsor_identifier": "identifier", - }, - ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["responsible_party_type"] == "Sponsor" - assert editor_response_data["responsible_party_investigator_first_name"] == "name" - assert editor_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - editor_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - editor_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - editor_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert editor_response_data["lead_sponsor_name"] == "name" - assert editor_response_data["lead_sponsor_identifier"] == "identifier" - assert editor_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert editor_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/metadata/sponsor", - json={ - "responsible_party_type": "Sponsor", - "responsible_party_investigator_first_name": "name", - "responsible_party_investigator_last_name": "surname", - "responsible_party_investigator_title": "title", - "responsible_party_investigator_identifier_value": "identifier", - "responsible_party_investigator_identifier_scheme": 
"scheme", - "responsible_party_investigator_identifier_scheme_uri": "uri", - "responsible_party_investigator_affiliation_name": "affiliation", - "responsible_party_investigator_affiliation_identifier_value": "identifier", - "responsible_party_investigator_affiliation_identifier_scheme": "scheme", - "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", - "lead_sponsor_name": "name", - "lead_sponsor_identifier": "identifier", - "lead_sponsor_identifier_scheme": "scheme", - "lead_sponsor_identifier_scheme_uri": "uri", - }, - ) - - assert viewer_response.status_code == 403 - - -def test_get_sponsors_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - WHEN the '/study/{study_id}/metadata/sponsors' endpoint is requested (GET) - THEN check that the response is valid and retrieves the sponsors metadata - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - - response = _logged_in_client.get(f"/study/{study_id}/metadata/sponsor") - admin_response = _admin_client.get(f"/study/{study_id}/metadata/sponsor") - editor_response = _editor_client.get(f"/study/{study_id}/metadata/sponsor") - viewer_response = _viewer_client.get(f"/study/{study_id}/metadata/sponsor") - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["responsible_party_type"] == "Sponsor" - assert response_data["responsible_party_investigator_first_name"] == "name" - assert response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - response_data["responsible_party_investigator_title"] == 
"title" - ) # noqa: E501 - - assert ( - response_data["responsible_party_investigator_identifier_value"] == "identifier" - ) - assert response_data["responsible_party_investigator_identifier_scheme"] == "scheme" - assert ( - response_data["responsible_party_investigator_identifier_scheme_uri"] == "uri" - ) - assert ( - response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - response_data["responsible_party_investigator_affiliation_identifier_value"] - == "identifier" - ) - assert ( - response_data["responsible_party_investigator_affiliation_identifier_scheme"] - == "scheme" - ) - assert ( - response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert response_data["lead_sponsor_name"] == "name" - assert response_data["lead_sponsor_identifier"] == "identifier" - assert response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - assert admin_response_data["responsible_party_type"] == "Sponsor" - assert admin_response_data["responsible_party_investigator_first_name"] == "name" - assert admin_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - admin_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - admin_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - admin_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - admin_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == 
"scheme" - ) - assert ( - admin_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert admin_response_data["lead_sponsor_name"] == "name" - assert admin_response_data["lead_sponsor_identifier"] == "identifier" - assert admin_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert admin_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - assert editor_response_data["responsible_party_type"] == "Sponsor" - assert editor_response_data["responsible_party_investigator_first_name"] == "name" - assert editor_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - editor_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - editor_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - editor_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - editor_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - editor_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert editor_response_data["lead_sponsor_name"] == "name" - assert editor_response_data["lead_sponsor_identifier"] == "identifier" - assert editor_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert editor_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - assert viewer_response_data["responsible_party_type"] == "Sponsor" - assert viewer_response_data["responsible_party_investigator_first_name"] == "name" 
- assert viewer_response_data["responsible_party_investigator_last_name"] == "surname" - assert ( - viewer_response_data["responsible_party_investigator_title"] == "title" - ) # noqa: E501 - assert ( - viewer_response_data["responsible_party_investigator_identifier_value"] - == "identifier" - ) - assert ( - viewer_response_data["responsible_party_investigator_identifier_scheme"] - == "scheme" - ) - assert ( - viewer_response_data["responsible_party_investigator_identifier_scheme_uri"] - == "uri" - ) - assert ( - viewer_response_data["responsible_party_investigator_affiliation_name"] - == "affiliation" - ) - assert ( - viewer_response_data[ - "responsible_party_investigator_affiliation_identifier_value" - ] - == "identifier" - ) - assert ( - viewer_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme" - ] - == "scheme" - ) - assert ( - viewer_response_data[ - "responsible_party_investigator_affiliation_identifier_scheme_uri" - ] - == "uri" - ) - assert viewer_response_data["lead_sponsor_name"] == "name" - assert viewer_response_data["lead_sponsor_identifier"] == "identifier" - assert viewer_response_data["lead_sponsor_identifier_scheme"] == "scheme" - assert viewer_response_data["lead_sponsor_identifier_scheme_uri"] == "uri" - - # ------------------- STATUS METADATA ------------------- # def test_put_status_metadata(clients): """ diff --git a/tests/functional/test_060_study_version_api.py b/tests/functional/test_060_study_version_api.py index 0b326d74..2ceee352 100644 --- a/tests/functional/test_060_study_version_api.py +++ b/tests/functional/test_060_study_version_api.py @@ -280,23 +280,47 @@ def test_get_version_study_metadata(clients): } ], ) - id_response = _logged_in_client.post( - f"/study/{study_id}/metadata/identification", + description_response = _logged_in_client.post( + f"/study/{study_id}/metadata/description", json={ - "primary": { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "domain", - 
"identifier_link": "link", - }, - "secondary": [ + "conditions": [ { - "identifier": "test", - "identifier_type": "test", - "identifier_domain": "dodfasdfmain", - "identifier_link": "link", + "name": "condition", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "condition_uri": "condition", + } + ], + "keywords": [ + { + "name": "keywords", + "classification_code": "classification code", + "scheme": "scheme", + "scheme_uri": "scheme uri", + "keyword_uri": "keywords", } ], + "identification": { + "primary": { + "identifier": "first", + "identifier_type": "test", + "identifier_domain": "domain", + "identifier_link": "link", + }, + "secondary": [ + { + "identifier": "test", + "identifier_type": "test", + "identifier_domain": "dodfasdfmain", + "identifier_link": "link", + } + ], + }, + "description": { + "brief_summary": "brief_summary", + "detailed_description": "detailed_description", + }, }, ) intervention_response = _logged_in_client.post( @@ -310,40 +334,41 @@ def test_get_version_study_metadata(clients): } ], ) - collaborators_response = _logged_in_client.post( - f"/study/{study_id}/metadata/collaborators", - json=[ - { - "name": "collaborator1123", - "identifier": "collaborator1123", - "identifier_scheme": "collaborator1123", - "identifier_scheme_uri": "collaborator1123", - } - ], - ) - conditions_response = _logged_in_client.post( - f"/study/{study_id}/metadata/conditions", - json=[ - { - "name": "condition", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "condition_uri": "condition", - } - ], - ) - keywords_response = _logged_in_client.post( - f"/study/{study_id}/metadata/keywords", - json=[ - { - "name": "keywords", - "classification_code": "classification code", - "scheme": "scheme", - "scheme_uri": "scheme uri", - "keyword_uri": "keywords", - } - ], + team_response = _logged_in_client.post( + f"/study/{study_id}/metadata/team", + json={ + 
"collaborators": [ + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + { + "name": "collaborator1123", + "identifier": "collaborator1123", + "identifier_scheme": "collaborator1123", + "identifier_scheme_uri": "collaborator1123", + }, + ], + "sponsors": { + "lead_sponsor_identifier_scheme": "scheme", + "lead_sponsor_identifier_scheme_uri": "uri", + "responsible_party_type": "Sponsor", + "responsible_party_investigator_first_name": "name", + "responsible_party_investigator_last_name": "surname", + "responsible_party_investigator_title": "title", + "responsible_party_investigator_identifier_value": "identifier", + "responsible_party_investigator_identifier_scheme": "scheme", + "responsible_party_investigator_identifier_scheme_uri": "uri", + "responsible_party_investigator_affiliation_name": "affiliation", + "responsible_party_investigator_affiliation_identifier_value": "identifier", + "responsible_party_investigator_affiliation_identifier_scheme": "scheme", + "responsible_party_investigator_affiliation_identifier_scheme_uri": "uri", + "lead_sponsor_name": "name", + "lead_sponsor_identifier": "identifier", + }, + }, ) of_response = _logged_in_client.post( @@ -368,12 +393,10 @@ def test_get_version_study_metadata(clients): assert arm_response.status_code == 201 assert cc_response.status_code == 201 assert location_response.status_code == 201 - assert id_response.status_code == 201 + assert description_response.status_code == 201 assert intervention_response.status_code == 201 + assert team_response.status_code == 201 assert of_response.status_code == 201 - assert collaborators_response.status_code == 201 - assert conditions_response.status_code == 201 - assert keywords_response.status_code == 201 response = _logged_in_client.get( f"/study/{study_id}/dataset/{dataset_id}/version/{version_id}/study-metadata" @@ -405,7 +428,7 @@ def 
test_get_version_study_metadata(clients): assert response_data["collaborators"][0]["name"] == "collaborator1123" assert response_data["conditions"][0]["name"] == "condition" assert response_data["keywords"][0]["name"] == "keywords" - assert response_data["description"]["brief_summary"] == "editor-brief_summary" + assert response_data["description"]["brief_summary"] == "brief_summary" assert response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert response_data["design"]["study_type"] == "Interventional" assert response_data["design"]["design_intervention_model"] == "Treatment" @@ -431,7 +454,7 @@ def test_get_version_study_metadata(clients): assert response_data["eligibility"]["sex"] == "All" assert response_data["eligibility"]["gender_based"] == "Yes" assert response_data["eligibility"]["maximum_age_value"] == 61 - assert response_data["primary_identifier"]["identifier"] == "test" + assert response_data["primary_identifier"]["identifier"] == "first" assert response_data["primary_identifier"]["identifier_type"] == "test" assert response_data["secondary_identifiers"][0]["identifier"] == "test" assert response_data["secondary_identifiers"][0]["identifier_type"] == "test" @@ -467,7 +490,7 @@ def test_get_version_study_metadata(clients): assert admin_response_data["collaborators"][0]["name"] == "collaborator1123" assert admin_response_data["conditions"][0]["name"] == "condition" assert admin_response_data["keywords"][0]["name"] == "keywords" - assert admin_response_data["description"]["brief_summary"] == "editor-brief_summary" + assert admin_response_data["description"]["brief_summary"] == "brief_summary" assert admin_response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert admin_response_data["design"]["study_type"] == "Interventional" assert admin_response_data["design"]["design_intervention_model"] == "Treatment" @@ -495,7 +518,7 @@ def test_get_version_study_metadata(clients): assert admin_response_data["eligibility"]["sex"] == 
"All" assert admin_response_data["eligibility"]["gender_based"] == "Yes" assert admin_response_data["eligibility"]["maximum_age_value"] == 61 - assert admin_response_data["primary_identifier"]["identifier"] == "test" + assert admin_response_data["primary_identifier"]["identifier"] == "first" assert admin_response_data["primary_identifier"]["identifier_type"] == "test" assert admin_response_data["secondary_identifiers"][0]["identifier"] == "test" assert admin_response_data["secondary_identifiers"][0]["identifier_type"] == "test" @@ -534,9 +557,7 @@ def test_get_version_study_metadata(clients): assert editor_response_data["collaborators"][0]["name"] == "collaborator1123" assert editor_response_data["conditions"][0]["name"] == "condition" assert editor_response_data["keywords"][0]["name"] == "keywords" - assert ( - editor_response_data["description"]["brief_summary"] == "editor-brief_summary" - ) + assert editor_response_data["description"]["brief_summary"] == "brief_summary" assert editor_response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert editor_response_data["design"]["study_type"] == "Interventional" assert editor_response_data["design"]["design_intervention_model"] == "Treatment" @@ -565,7 +586,7 @@ def test_get_version_study_metadata(clients): assert editor_response_data["eligibility"]["sex"] == "All" assert editor_response_data["eligibility"]["gender_based"] == "Yes" assert editor_response_data["eligibility"]["maximum_age_value"] == 61 - assert editor_response_data["primary_identifier"]["identifier"] == "test" + assert editor_response_data["primary_identifier"]["identifier"] == "first" assert editor_response_data["primary_identifier"]["identifier_type"] == "test" assert editor_response_data["secondary_identifiers"][0]["identifier"] == "test" assert editor_response_data["secondary_identifiers"][0]["identifier_type"] == "test" @@ -604,9 +625,7 @@ def test_get_version_study_metadata(clients): assert 
viewer_response_data["collaborators"][0]["name"] == "collaborator1123" assert viewer_response_data["conditions"][0]["name"] == "condition" assert viewer_response_data["keywords"][0]["name"] == "keywords" - assert ( - viewer_response_data["description"]["brief_summary"] == "editor-brief_summary" - ) + assert viewer_response_data["description"]["brief_summary"] == "brief_summary" assert viewer_response_data["design"]["design_allocation"] == "editor-dfasdfasd" assert viewer_response_data["design"]["study_type"] == "Interventional" assert viewer_response_data["design"]["design_intervention_model"] == "Treatment" @@ -635,7 +654,7 @@ def test_get_version_study_metadata(clients): assert viewer_response_data["eligibility"]["sex"] == "All" assert viewer_response_data["eligibility"]["gender_based"] == "Yes" assert viewer_response_data["eligibility"]["maximum_age_value"] == 61 - assert viewer_response_data["primary_identifier"]["identifier"] == "test" + assert viewer_response_data["primary_identifier"]["identifier"] == "first" assert viewer_response_data["primary_identifier"]["identifier_type"] == "test" assert viewer_response_data["secondary_identifiers"][0]["identifier"] == "test" assert viewer_response_data["secondary_identifiers"][0]["identifier_type"] == "test" diff --git a/tests/functional/test_070_user.py b/tests/functional/test_070_user.py index b4202ea8..2fc40617 100644 --- a/tests/functional/test_070_user.py +++ b/tests/functional/test_070_user.py @@ -50,7 +50,7 @@ def test_post_password_change(clients): assert v_response.status_code == 200 meta = db.metadata for table in reversed(meta.sorted_tables): - if table.name == 'session': + if table.name == "session": session_entries = db.session.execute(table.select()).fetchall() assert len(session_entries) == 0 @@ -113,7 +113,7 @@ def test_post_login_new_password(clients): assert response.status_code == 200 meta = db.metadata for table in reversed(meta.sorted_tables): - if table.name == 'session': + if table.name == 
"session": session_entries = db.session.execute(table.select()).fetchall() assert len(session_entries) == 1 @@ -126,18 +126,10 @@ def test_post_logout(clients): """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - response = _logged_in_client.post( - "/auth/logout" - ) - a_response = _admin_client.post( - "/auth/logout" - ) - e_response = _editor_client.post( - "/auth/logout" - ) - v_response = _viewer_client.post( - "/auth/logout" - ) + response = _logged_in_client.post("/auth/logout") + a_response = _admin_client.post("/auth/logout") + e_response = _editor_client.post("/auth/logout") + v_response = _viewer_client.post("/auth/logout") assert response.status_code == 204 assert a_response.status_code == 204 @@ -145,6 +137,6 @@ def test_post_logout(clients): assert v_response.status_code == 204 meta = db.metadata for table in reversed(meta.sorted_tables): - if table.name == 'session': + if table.name == "session": session_entries = db.session.execute(table.select()).fetchall() assert len(session_entries) == 0 From 2c9210cb673490fc757a25217a102d3db4c8e44a Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 11 Mar 2025 20:44:20 -0700 Subject: [PATCH 486/505] fix: update model for the user and contributor --- apis/user.py | 3 ++- model/study_contributor.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/apis/user.py b/apis/user.py index 68281ce7..44581a07 100644 --- a/apis/user.py +++ b/apis/user.py @@ -78,6 +78,7 @@ def validate_is_valid_email(instance): "properties": { "id": {"type": "string"}, "email_address": {"type": "string", "format": "valid_email"}, + "email_verified": {"type": "boolean"}, "username": {"type": "string", "minLength": 0}, "first_name": {"type": "string", "minLength": 0}, "last_name": {"type": "string", "minLength": 0}, @@ -104,7 +105,7 @@ def validate_is_valid_email(instance): data: Union[Any, dict] = request.json user = model.User.query.get(g.user.id) - # user.update(data) # don't update the username 
and email_address for now + # user.update(data) # don't update the email_address for now user_details = user.user_details user_details.update(data) model.db.session.commit() diff --git a/model/study_contributor.py b/model/study_contributor.py index 7ba8eb21..bc09153d 100644 --- a/model/study_contributor.py +++ b/model/study_contributor.py @@ -33,6 +33,9 @@ def to_dict(self): "name": ( self.user.user_details.first_name if self.user.user_details else None ), + "lastName": ( + self.user.user_details.last_name if self.user.user_details else None + ), "email_address": self.user.email_address, "orcid": self.user.user_details.orcid if self.user.user_details else None, "role": self.permission, From 985ccd72b444881a01826c2ff1655b2a694f5331 Mon Sep 17 00:00:00 2001 From: Aydan <62059163+Aydawka@users.noreply.github.com> Date: Mon, 24 Mar 2025 16:17:11 -0700 Subject: [PATCH 487/505] fix: restyle dataset metadata (#63) * fix: general information endpoint * fix: update general information * fix: update general information * fix: update schema for the general information * fix: update team * fix: update team schema * fix: rename contributoir to team * fix: access and rights * fix: restructure dataset metadata * feat: dataset metadata test * fix: format * fix: flask restx validation * fix: flask restx validation * fix: study team * style: format * style: format * fix: dataset metadata version test --- apis/__init__.py | 30 +- apis/dataset_metadata/dataset_access.py | 72 - .../dataset_metadata/dataset_access_rights.py | 209 + .../dataset_alternate_identifier.py | 3 +- apis/dataset_metadata/dataset_consent.py | 84 - apis/dataset_metadata/dataset_contributor.py | 300 - .../dataset_data_management.py | 219 + apis/dataset_metadata/dataset_date.py | 112 - .../dataset_de_ident_level.py | 84 - apis/dataset_metadata/dataset_description.py | 137 - apis/dataset_metadata/dataset_funder.py | 125 - .../dataset_general_information.py | 302 + .../dataset_managing_organization.py | 73 - 
apis/dataset_metadata/dataset_other.py | 27 +- .../dataset_related_identifier.py | 3 +- apis/dataset_metadata/dataset_rights.py | 122 - apis/dataset_metadata/dataset_subject.py | 120 - apis/dataset_metadata/dataset_team.py | 403 ++ apis/dataset_metadata/dataset_title.py | 128 - .../test_040_study_dataset_metadata_api.py | 5769 ++++++++--------- .../functional/test_060_study_version_api.py | 214 +- 21 files changed, 4022 insertions(+), 4514 deletions(-) delete mode 100644 apis/dataset_metadata/dataset_access.py create mode 100644 apis/dataset_metadata/dataset_access_rights.py delete mode 100644 apis/dataset_metadata/dataset_consent.py delete mode 100644 apis/dataset_metadata/dataset_contributor.py create mode 100644 apis/dataset_metadata/dataset_data_management.py delete mode 100644 apis/dataset_metadata/dataset_date.py delete mode 100644 apis/dataset_metadata/dataset_de_ident_level.py delete mode 100644 apis/dataset_metadata/dataset_description.py delete mode 100644 apis/dataset_metadata/dataset_funder.py create mode 100644 apis/dataset_metadata/dataset_general_information.py delete mode 100644 apis/dataset_metadata/dataset_managing_organization.py delete mode 100644 apis/dataset_metadata/dataset_rights.py delete mode 100644 apis/dataset_metadata/dataset_subject.py create mode 100644 apis/dataset_metadata/dataset_team.py delete mode 100644 apis/dataset_metadata/dataset_title.py diff --git a/apis/__init__.py b/apis/__init__.py index 399a909f..8def85ac 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -9,21 +9,14 @@ from .contributor import api as contributors_api from .dashboard import api as dashboard from .dataset import api as dataset_api -from .dataset_metadata.dataset_access import api as access +from .dataset_metadata.dataset_access_rights import api as access_rights from .dataset_metadata.dataset_alternate_identifier import api as alternate_identifier -from .dataset_metadata.dataset_consent import api as consent -from 
.dataset_metadata.dataset_contributor import api as dataset_contributor -from .dataset_metadata.dataset_date import api as date -from .dataset_metadata.dataset_de_ident_level import api as de_ident_level -from .dataset_metadata.dataset_description import api as description -from .dataset_metadata.dataset_funder import api as funder +from .dataset_metadata.dataset_data_management import api as dataset_data_management +from .dataset_metadata.dataset_team import api as dataset_team from .dataset_metadata.dataset_healthsheet import api as healthsheet -from .dataset_metadata.dataset_managing_organization import api as managing_organization from .dataset_metadata.dataset_other import api as dataset_other from .dataset_metadata.dataset_related_identifier import api as related_identifier -from .dataset_metadata.dataset_rights import api as rights -from .dataset_metadata.dataset_subject import api as subject -from .dataset_metadata.dataset_title import api as title +from .dataset_metadata.dataset_general_information import api as general_information from .file import api as file_api from .participant import api as participants_api from .redcap import api as redcap @@ -55,26 +48,19 @@ ) __all__ = [ - "managing_organization", "dataset_metadata_namespace", "study_metadata_namespace", "authentication", "contributors_api", "dataset_api", - "access", + "access_rights", "alternate_identifier", - "consent", + "dataset_data_management", "healthsheet", - "date", - "de_ident_level", - "description", - "funder", "dataset_other", "related_identifier", "api", - "rights", - "subject", - "title", + "general_information", "participants_api", "study_api", "arm", @@ -94,7 +80,7 @@ "user", "identification", "study_description", - "dataset_contributor", + "dataset_team", "redcap", "dashboard", "utils", diff --git a/apis/dataset_metadata/dataset_access.py b/apis/dataset_metadata/dataset_access.py deleted file mode 100644 index 4bf52f39..00000000 --- a/apis/dataset_metadata/dataset_access.py +++ 
/dev/null @@ -1,72 +0,0 @@ -"""API for dataset access metadata""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_access = api.model( - "DatasetAccess", - { - "id": fields.String(required=True), - "type": fields.String(required=True), - "description": fields.String(required=True), - "url": fields.String(required=True), - "url_last_checked": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/access") -class DatasetAccessResource(Resource): - """Dataset Access Resource""" - - @api.doc("access") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_access) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset access""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_access_ = dataset_.dataset_access - return dataset_access_.to_dict(), 200 - - @api.doc("update access") - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Update dataset access""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "description": {"type": "string", "minLength": 1}, - "type": {"type": "string", "minLength": 1}, - "url": {"type": "string"}, - "url_last_checked": {"type": ["integer", "null"]}, - }, - "required": [ - "description", - "type", - "url", - "url_last_checked", - ], - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - dataset_ = model.Dataset.query.get(dataset_id) - 
dataset_.dataset_access.update(request.json) - model.db.session.commit() - return dataset_.dataset_access.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_access_rights.py b/apis/dataset_metadata/dataset_access_rights.py new file mode 100644 index 00000000..a2b50b38 --- /dev/null +++ b/apis/dataset_metadata/dataset_access_rights.py @@ -0,0 +1,209 @@ +"""API for dataset access and rights metadata""" + +from typing import Any, Union + +from flask import request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_access_rights = api.model( + "DatasetAccessRights", + { + "access": fields.Nested( + api.model( + "Access", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "description": fields.String(required=True), + "url": fields.String(required=True), + "url_last_checked": fields.Integer(required=True), + }, + ) + ), + "rights": fields.Nested( + api.model( + "Rights", + { + "id": fields.String(required=True), + "rights": fields.String(required=True), + "uri": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_scheme": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + "license_text": fields.String(required=True), + }, + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/access-rights") +class DatasetAccessRights(Resource): + """Dataset Access and Rights Resource""" + + @api.doc("access") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_access_rights) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset access""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_access_ = dataset_.dataset_access + dataset_rights_ = dataset_.dataset_rights + return { + "access": 
dataset_access_.to_dict(), + "rights": [d.to_dict() for d in dataset_rights_], + }, 200 + + @api.doc("update access") + @api.marshal_with(dataset_access_rights) + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Update dataset access""" + study_obj = model.Study.query.get(study_id) + data: Union[Any, dict] = request.json + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "rights": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + "rights": {"type": "string", "minLength": 1}, + "uri": {"type": "string"}, + "license_text": {"type": "string"}, + }, + "required": [ + "identifier", + "identifier_scheme", + "rights", + "uri", + "license_text", + ], + }, + "uniqueItems": True, + }, + "access": { + "type": "object", + "additionalProperties": False, + "properties": { + "description": {"type": "string", "minLength": 1}, + "type": {"type": "string", "minLength": 1}, + "url": {"type": "string"}, + "url_last_checked": {"type": ["integer", "null"]}, + }, + "required": [ + "description", + "type", + "url", + "url_last_checked", + ], + }, + }, + "required": ["rights", "access"], + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_access.update(data["access"]) + list_of_rights = [] + for i in data["rights"]: + if "id" in i and i["id"]: + dataset_rights_ = model.DatasetRights.query.get(i["id"]) + if not dataset_rights_: + return f"Study link {i['id']} Id is not 
found", 404 + dataset_rights_.update(i) + list_of_rights.append(dataset_rights_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_rights_ = model.DatasetRights.from_data(dataset_, i) + model.db.session.add(dataset_rights_) + list_of_rights.append(dataset_rights_.to_dict()) + model.db.session.commit() + return { + "access": dataset_.dataset_access.to_dict(), + "rights": list_of_rights, + }, 200 + + +@api.route("/study//dataset//metadata/rights") +class DatasetRightsResource(Resource): + """Dataset Rights Resource""" + + @api.doc("update rights") + @api.response(201, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + """Update dataset rights""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + "rights": {"type": "string", "minLength": 1}, + "uri": {"type": "string"}, + "license_text": {"type": "string"}, + }, + "required": [ + "identifier", + "identifier_scheme", + "rights", + "uri", + "license_text", + ], + }, + "uniqueItems": True, + } + + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + list_of_elements = [] + for i in data: + if "id" in i and i["id"]: + dataset_rights_ = model.DatasetRights.query.get(i["id"]) + if not dataset_rights_: + return f"Study link {i['id']} Id is not found", 404 + dataset_rights_.update(i) + list_of_elements.append(dataset_rights_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_rights_ = 
model.DatasetRights.from_data(data_obj, i) + model.db.session.add(dataset_rights_) + list_of_elements.append(dataset_rights_.to_dict()) + model.db.session.commit() + return list_of_elements, 200 diff --git a/apis/dataset_metadata/dataset_alternate_identifier.py b/apis/dataset_metadata/dataset_alternate_identifier.py index 5d6b5e07..085d1f3e 100644 --- a/apis/dataset_metadata/dataset_alternate_identifier.py +++ b/apis/dataset_metadata/dataset_alternate_identifier.py @@ -28,7 +28,7 @@ class DatasetAlternateIdentifierResource(Resource): @api.doc("identifier") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(dataset_identifier) + @api.marshal_with(dataset_identifier) def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argument """Get dataset alternate identifier""" dataset_ = model.Dataset.query.get(dataset_id) @@ -38,6 +38,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable = unused-argum @api.doc("update identifier") @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(dataset_identifier) def post(self, study_id: int, dataset_id: int): """Update dataset alternate identifier""" study_obj = model.Study.query.get(study_id) diff --git a/apis/dataset_metadata/dataset_consent.py b/apis/dataset_metadata/dataset_consent.py deleted file mode 100644 index 958822b2..00000000 --- a/apis/dataset_metadata/dataset_consent.py +++ /dev/null @@ -1,84 +0,0 @@ -"""API for dataset consent metadata""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_consent = api.model( - "DatasetConsent", - { - "id": fields.String(required=True), - "type": fields.String(required=True), - "noncommercial": fields.Boolean(required=True), - "geog_restrict": fields.Boolean(required=True), - 
"research_type": fields.Boolean(required=True), - "genetic_only": fields.Boolean(required=True), - "no_methods": fields.Boolean(required=True), - "details": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/consent") -class DatasetConsentResource(Resource): - """Dataset Consent Resource""" - - @api.doc("consent") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_consent) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset consent""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_consent_ = dataset_.dataset_consent - return dataset_consent_.to_dict(), 200 - - @api.doc("update consent") - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int, dataset_id: int): - """Update dataset consent""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "type": {"type": "string", "minLength": 1}, - "details": { - "type": "string", - }, - "genetic_only": {"type": "boolean"}, - "geog_restrict": {"type": "boolean"}, - "no_methods": {"type": "boolean"}, - "noncommercial": {"type": "boolean"}, - "research_type": {"type": "boolean"}, - }, - "required": [ - "type", - "details", - "genetic_only", - "geog_restrict", - "no_methods", - "noncommercial", - "research_type", - ], - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_consent.update(data) - model.db.session.commit() - return dataset_.dataset_consent.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_contributor.py b/apis/dataset_metadata/dataset_contributor.py 
deleted file mode 100644 index 41336079..00000000 --- a/apis/dataset_metadata/dataset_contributor.py +++ /dev/null @@ -1,300 +0,0 @@ -"""API for dataset contributor metadata""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_contributor = api.model( - "DatasetContributor", - {}, -) - - -@api.route("/study//dataset//metadata/contributor") -class DatasetContributorResource(Resource): - """Dataset Contributor Resource""" - - @api.doc("contributor") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_contributor) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset contributor""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_contributor_ = dataset_.dataset_contributors - - return [ - d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"] - ], 200 - - @api.doc("update contributor") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset contributor""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, can't modify dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "contributor_type": { - "type": "string", - "minLength": 1, - }, - "given_name": { - "type": "string", - "minLength": 1, - }, - "family_name": {"type": ["string", "null"]}, - "name_identifier": { - "type": "string", - "minLength": 1, - }, - "name_identifier_scheme": { - "type": "string", - "minLength": 1, - }, - "name_identifier_scheme_uri": { - "type": "string", - }, - "name_type": { 
- "type": "string", - "enum": [ - "Personal", - "Organizational", - ], - "minLength": 1, - }, - "affiliations": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "identifier": { - "type": "string", - }, - "scheme": { - "type": "string", - }, - "scheme_uri": { - "type": "string", - }, - }, - }, - "uniqueItems": True, - }, - }, - "required": [ - "contributor_type", - "name_type", - "given_name", - "affiliations", - "name_identifier", - "name_identifier_scheme", - ], - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - i["creator"] = False - if "id" in i and i["id"]: - dataset_contributor_ = model.DatasetContributor.query.get(i["id"]) - if not dataset_contributor_: - return f"Study link {i['id']} Id is not found", 404 - dataset_contributor_.update(i) - list_of_elements.append(dataset_contributor_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_contributor_ = model.DatasetContributor.from_data(data_obj, i) - model.db.session.add(dataset_contributor_) - list_of_elements.append(dataset_contributor_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route( - "/study//dataset//metadata/contributor/" -) -class DatasetContributorDelete(Resource): - """Dataset Contributor Delete Resource""" - - @api.doc("delete contributor") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - contributor_id: int, - ): - """Delete dataset contributor""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - contributor_ = 
model.DatasetContributor.query.get(contributor_id) - - model.db.session.delete(contributor_) - model.db.session.commit() - - return Response(status=204) - - -@api.route("/study//dataset//metadata/creator") -class DatasetCreatorResource(Resource): - """Dataset Creator Resource""" - - @api.doc("creator") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_contributor) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset creator""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_creator_ = dataset_.dataset_contributors - # TODO d.creator - return [d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"]], 200 - - @api.doc("update creator") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset creator""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "given_name": { - "type": "string", - "minLength": 1, - }, - "family_name": {"type": ["string", "null"]}, - "name_identifier": { - "type": "string", - "minLength": 1, - }, - "name_identifier_scheme": { - "type": "string", - "minLength": 1, - }, - "name_identifier_scheme_uri": { - "type": "string", - }, - "name_type": { - "type": "string", - "enum": [ - "Personal", - "Organizational", - ], - "minLength": 1, - }, - "affiliations": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "name": { - "type": "string", - }, - "identifier": { - "type": "string", - }, - "scheme": { - "type": "string", - }, - "scheme_uri": { - "type": "string", - }, - }, - }, - "uniqueItems": True, - }, - }, - "required": 
[ - "name_type", - "given_name", - "affiliations", - "name_identifier", - "name_identifier_scheme", - ], - }, - } - - try: - validate(request.json, schema) - except ValidationError as e: - return e.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - i["creator"] = True - if "id" in i and i["id"]: - i["contributor_type"] = None - dataset_creator_ = model.DatasetContributor.query.get(i["id"]) - if not dataset_creator_: - return f"Study link {i['id']} Id is not found", 404 - dataset_creator_.update(i) - list_of_elements.append(dataset_creator_.to_dict()) - elif "id" not in i or not i["id"]: - i["contributor_type"] = None - dataset_creator_ = model.DatasetContributor.from_data(data_obj, i) - model.db.session.add(dataset_creator_) - list_of_elements.append(dataset_creator_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/creator/") -class DatasetCreatorDelete(Resource): - @api.doc("delete creator") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - creator_id: int, - ): - """Delete dataset creator""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_creator_ = model.DatasetContributor.query.get(creator_id) - model.db.session.delete(dataset_creator_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_data_management.py b/apis/dataset_metadata/dataset_data_management.py new file mode 100644 index 00000000..7c8a2089 --- /dev/null +++ b/apis/dataset_metadata/dataset_data_management.py @@ -0,0 +1,219 @@ +"""API for dataset consent metadata""" +import typing + +from flask import Response, request +from flask_restx 
import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_data_management = api.model( + "DatasetDataManagement", + { + "consent": fields.Nested( + api.model( + "DatasetConsent", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "noncommercial": fields.Boolean(required=True), + "geog_restrict": fields.Boolean(required=True), + "research_type": fields.Boolean(required=True), + "genetic_only": fields.Boolean(required=True), + "no_methods": fields.Boolean(required=True), + "details": fields.String(required=True), + }, + ) + ), + "subjects": fields.List( + fields.Nested( + api.model( + "DatasetSubjects", + { + "id": fields.String(required=True), + "subject": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "value_uri": fields.String(required=True), + "classification_code": fields.String(required=True), + }, + ) + ) + ), + "deident": fields.Nested( + api.model( + "DatasetDeIdentLevel", + { + "id": fields.String(required=True), + "type": fields.String(required=True), + "direct": fields.Boolean(required=True), + "hipaa": fields.Boolean(required=True), + "dates": fields.Boolean(required=True), + "nonarr": fields.Boolean(required=True), + "k_anon": fields.Boolean(required=True), + "details": fields.String(required=True), + }, + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/data-management") +class DatasetDataManagement(Resource): + """Dataset Data management Resource""" + + @api.doc("consent") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.marshal_with(dataset_consent) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset consent""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_consent_ = dataset_.dataset_consent + de_ident_level_ = 
dataset_.dataset_de_ident_level + dataset_subject_ = dataset_.dataset_subject + return { + "consent": dataset_consent_.to_dict(), + "deident": de_ident_level_.to_dict(), + "subjects": [d.to_dict() for d in dataset_subject_], + }, 200 + + @api.doc("update consent") + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self, study_id: int, dataset_id: int): + """Update dataset consent""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "consent": { + "type": "object", + "additionalProperties": False, + "properties": { + "type": {"type": "string", "minLength": 1}, + "details": {"type": "string"}, + "genetic_only": {"type": "boolean"}, + "geog_restrict": {"type": "boolean"}, + "no_methods": {"type": "boolean"}, + "noncommercial": {"type": "boolean"}, + "research_type": {"type": "boolean"}, + }, + "required": [ + "type", + "details", + "genetic_only", + "geog_restrict", + "no_methods", + "noncommercial", + "research_type", + ], + }, + "subjects": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "classification_code": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + "subject": {"type": "string", "minLength": 1}, + "value_uri": {"type": "string"}, + }, + "required": [ + "subject", + "scheme", + "scheme_uri", + "value_uri", + "classification_code", + ], + }, + "uniqueItems": True, + }, + "deident": { + "type": "object", + "additionalProperties": False, + "properties": { + "type": {"type": "string", "minLength": 1}, + "details": {"type": "string"}, + "direct": {"type": "boolean"}, + "hipaa": {"type": "boolean"}, + "dates": {"type": "boolean"}, + "k_anon": {"type": "boolean"}, + "nonarr": {"type": 
"boolean"}, + }, + "required": [ + "type", + "details", + "direct", + "hipaa", + "dates", + "k_anon", + "nonarr", + ], + }, + }, + "required": [], + } + try: + validate(instance=request.json, schema=schema) + except ValidationError as err: + return err.message, 400 + + data: typing.Union[dict, typing.Any] = request.json + dataset_ = model.Dataset.query.get(dataset_id) + dataset_.dataset_consent.update(data["consent"]) + dataset_.dataset_de_ident_level.update(data["deident"]) + list_of_subjects = [] + for i in data["subjects"]: + if "id" in i and i["id"]: + dataset_subject_ = model.DatasetSubject.query.get(i["id"]) + if not dataset_subject_: + return f"Study link {i['id']} Id is not found", 404 + dataset_subject_.update(i) + list_of_subjects.append(dataset_subject_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_subject_ = model.DatasetSubject.from_data(dataset_, i) + model.db.session.add(dataset_subject_) + list_of_subjects.append(dataset_subject_.to_dict()) + model.db.session.commit() + return { + "consent": dataset_.dataset_consent.to_dict(), + "deident": dataset_.dataset_de_ident_level.to_dict(), + "subjects": list_of_subjects, + }, 200 + + +@api.route("/study//dataset//metadata/subject/") +class DatasetSubjectUpdate(Resource): + """Dataset Subject Update Resource""" + + @api.doc("delete subject") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, # pylint: disable= unused-argument + dataset_id: int, # pylint: disable= unused-argument + subject_id: int, + ): + """Delete dataset subject""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can't make change in dataset metadata", 403 + dataset_subject_ = model.DatasetSubject.query.get(subject_id) + + model.db.session.delete(dataset_subject_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_date.py 
b/apis/dataset_metadata/dataset_date.py deleted file mode 100644 index 947c356d..00000000 --- a/apis/dataset_metadata/dataset_date.py +++ /dev/null @@ -1,112 +0,0 @@ -"""APIs for dataset date metadata""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_date = api.model( - "DatasetDate", - { - "id": fields.String(required=True), - "date": fields.String(required=True), - "type": fields.String(required=True), - "information": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/date") -class DatasetDateResource(Resource): - """Dataset Date Resource""" - - @api.doc("date") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_date) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset date""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_date_ = dataset_.dataset_date - return [d.to_dict() for d in dataset_date_], 200 - - @api.doc("update date") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset date""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "date": { - "type": "integer", - }, - "type": { - "type": "string", - "minLength": 1, - }, - "information": { - "type": "string", - }, - }, - "required": ["date", "type", "information"], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except 
ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_date_ = model.DatasetDate.query.get(i["id"]) - if not dataset_date_: - return f"Study link {i['id']} Id is not found", 404 - dataset_date_.update(i) - list_of_elements.append(dataset_date_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_date_ = model.DatasetDate.from_data(data_obj, i) - model.db.session.add(dataset_date_) - list_of_elements.append(dataset_date_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/date/") -class DatasetDateDeleteResource(Resource): - """Dataset Date Delete Resource""" - - @api.doc("delete date") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, study_id: int, dataset_id: int, date_id: int - ): # pylint: disable= unused-argument - """Delete dataset date""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - date_ = model.DatasetDate.query.get(date_id) - - model.db.session.delete(date_) - model.db.session.commit() - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_de_ident_level.py b/apis/dataset_metadata/dataset_de_ident_level.py deleted file mode 100644 index a9f7c7f5..00000000 --- a/apis/dataset_metadata/dataset_de_ident_level.py +++ /dev/null @@ -1,84 +0,0 @@ -"""APIs for dataset de-identification level""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -de_ident_level = api.model( - "DatasetDeIdentLevel", - { - "id": fields.String(required=True), - "type": 
fields.String(required=True), - "direct": fields.Boolean(required=True), - "hipaa": fields.Boolean(required=True), - "dates": fields.Boolean(required=True), - "nonarr": fields.Boolean(required=True), - "k_anon": fields.Boolean(required=True), - "details": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/de-identification-level") -class DatasetDeIdentLevelResource(Resource): - """Dataset De-Identification Level Resource""" - - @api.doc("de_ident_level") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(de_ident_level) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset de-identification level""" - dataset_ = model.Dataset.query.get(dataset_id) - de_ident_level_ = dataset_.dataset_de_ident_level - return de_ident_level_.to_dict(), 200 - - @api.doc("update ident level") - @api.response(200, "Success") - @api.response(400, "Validation Error") - def put(self, study_id: int, dataset_id: int): - """Update dataset de-identification level""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "type": {"type": "string", "minLength": 1}, - "details": { - "type": "string", - }, - "direct": {"type": "boolean"}, - "hipaa": {"type": "boolean"}, - "dates": {"type": "boolean"}, - "k_anon": {"type": "boolean"}, - "nonarr": {"type": "boolean"}, - }, - "required": [ - "type", - "details", - "direct", - "hipaa", - "dates", - "k_anon", - "nonarr", - ], - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_de_ident_level.update(data) - model.db.session.commit() - return 
dataset_.dataset_de_ident_level.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_description.py b/apis/dataset_metadata/dataset_description.py deleted file mode 100644 index ddd4c56a..00000000 --- a/apis/dataset_metadata/dataset_description.py +++ /dev/null @@ -1,137 +0,0 @@ -"""API endpoints for dataset description""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_description = api.model( - "DatasetDescription", - { - "id": fields.String(required=True), - "description": fields.String(required=True), - "description_type": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/description") -class DatasetDescriptionResource(Resource): - """Dataset Description Resource""" - - @api.doc("description") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.marshal_with(dataset_description) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset description""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_description_ = dataset_.dataset_description - return [d.to_dict() for d in dataset_description_], 200 - - @api.doc("update description") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset description""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "description": { - "type": "string", - "minLength": 1, - }, - "type": { - "type": "string", - "enum": [ - 
"Abstract", - "Methods", - "SeriesInformation", - "TableOfContents", - "TechnicalInfo", - "Other", - ], - }, - }, - "required": ["description", "type"], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_description_ = model.DatasetDescription.query.get(i["id"]) - # if dataset_description_.type == "Abstract": - # return ( - # "Abstract type can not be modified", - # 403, - # ) - dataset_description_.update(i) - list_of_elements.append(dataset_description_.to_dict()) - elif "id" not in i or not i["id"]: - if i["type"] == "Abstract": - return ( - "Abstract type in description can not be given", - 403, - ) - dataset_description_ = model.DatasetDescription.from_data(data_obj, i) - model.db.session.add(dataset_description_) - list_of_elements.append(dataset_description_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - @api.route( - "/study//dataset//" - "metadata/description/" - ) - class DatasetDescriptionUpdate(Resource): - """Dataset Description Update Resource""" - - @api.doc("delete description") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - description_id: int, - ): - """Delete dataset description""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return ( - "Access denied, you can not make any change in dataset metadata", - 403, - ) - dataset_description_ = model.DatasetDescription.query.get(description_id) - if dataset_description_.type == "Abstract": - return ( - "Abstract description can not be deleted", - 403, - ) - model.db.session.delete(dataset_description_) - model.db.session.commit() - - return 
Response(status=204) diff --git a/apis/dataset_metadata/dataset_funder.py b/apis/dataset_metadata/dataset_funder.py deleted file mode 100644 index 7110e5b9..00000000 --- a/apis/dataset_metadata/dataset_funder.py +++ /dev/null @@ -1,125 +0,0 @@ -"""API endpoints for dataset funder""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_funder = api.model( - "DatasetFunder", - { - "id": fields.String(required=True), - "name": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_type": fields.String(required=True), - "identifier_scheme_uri": fields.String(required=True), - "award_number": fields.String(required=True), - "award_uri": fields.String(required=True), - "award_title": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/funder") -class DatasetFunderResource(Resource): - """Dataset Funder Resource""" - - @api.doc("funder") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_funder) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset funder""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_funder_ = dataset_.dataset_funder - return [d.to_dict() for d in dataset_funder_], 200 - - @api.doc("update funder") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Update dataset funder""" - data: Union[Any, dict] = request.json - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": 
"object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "name": {"type": "string", "minLength": 1}, - "award_number": {"type": "string", "minLength": 1}, - "award_title": {"type": "string"}, - "award_uri": {"type": "string"}, - "identifier": {"type": "string", "minLength": 1}, - "identifier_scheme_uri": {"type": "string"}, - "identifier_type": {"type": ["string", "null"]}, - }, - "required": [ - "name", - "award_number", - "award_title", - "award_uri", - "identifier", - "identifier_scheme_uri", - "identifier_type", - ], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_funder_ = model.DatasetFunder.query.get(i["id"]) - if not dataset_funder_: - return f"Study link {i['id']} Id is not found", 404 - dataset_funder_.update(i) - list_of_elements.append(dataset_funder_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_funder_ = model.DatasetFunder.from_data(data_obj, i) - model.db.session.add(dataset_funder_) - list_of_elements.append(dataset_funder_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/funder/") -class DatasetFunderUpdate(Resource): - """Dataset Funder Update Resource""" - - @api.doc("delete funder") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - funder_id: int, - ): - """Delete dataset funder""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_funder_ = model.DatasetFunder.query.get(funder_id) - - model.db.session.delete(dataset_funder_) - model.db.session.commit() - - 
return Response(status=204) diff --git a/apis/dataset_metadata/dataset_general_information.py b/apis/dataset_metadata/dataset_general_information.py new file mode 100644 index 00000000..4d81987d --- /dev/null +++ b/apis/dataset_metadata/dataset_general_information.py @@ -0,0 +1,302 @@ +"""API for dataset title metadata""" + +from typing import Any, Union + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_general_information = api.model( + "DatasetGeneralInformation", + { + "titles": fields.List( + fields.Nested( + api.model( + "DatasetTitle", + { + "id": fields.String(required=True), + "title": fields.String(required=True), + "type": fields.String(required=True), + }, + ) + ) + ), + "descriptions": fields.List( + fields.Nested( + api.model( + "DatasetDescription", + { + "id": fields.String(required=True), + "description": fields.String(required=True), + "type": fields.String(required=True), + }, + ) + ) + ), + "dates": fields.List( + fields.Nested( + api.model( + "DatasetDate", + { + "id": fields.String(required=True), + "date": fields.Integer(required=True), + "type": fields.String(required=True), + "information": fields.String(required=True), + }, + ) + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/general-information") +class DatasetGeneralInformation(Resource): + """Dataset General Information Resource""" + + @api.doc("title") + @api.response(200, "Success") + @api.response(400, "Validation Error") + # @api.param("id", "The dataset identifier") + @api.marshal_with(dataset_general_information) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset title""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_title_ = dataset_.dataset_title + dataset_description_ = dataset_.dataset_description + 
dataset_date_ = dataset_.dataset_date + return { + "titles": [d.to_dict() for d in dataset_title_], + "descriptions": [d.to_dict() for d in dataset_description_], + "dates": [d.to_dict() for d in dataset_date_], + }, 200 + + @api.doc("update general information") + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_general_information) + def post(self, study_id: int, dataset_id: int): + """Update dataset title""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "titles": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "title": {"type": "string", "minLength": 1}, + "type": { + "type": "string", + "enum": [ + "MainTitle", + "AlternativeTitle", + "Subtitle", + "TranslatedTitle", + "OtherTitle", + ], + }, + }, + "required": ["title", "type"], + }, + }, + "descriptions": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "description": {"type": "string", "minLength": 1}, + "type": { + "type": "string", + "enum": [ + "Abstract", + "Methods", + "SeriesInformation", + "TableOfContents", + "TechnicalInfo", + "Other", + ], + }, + }, + "required": ["description", "type"], + }, + }, + "dates": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "date": {"type": "integer"}, + "type": {"type": "string", "minLength": 1}, + "information": {"type": "string"}, + }, + "required": ["date", "type", "information"], + }, + }, + }, + } + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + list_of_titles = [] + for i in data["titles"]: + if "id" 
in i and i["id"]: + dataset_title_ = model.DatasetTitle.query.get(i["id"]) + dataset_title_.update(i) + list_of_titles.append(dataset_title_.to_dict()) + elif "id" not in i or not i["id"]: + if i["type"] == "MainTitle": + return ( + "Main Title type can not be given", + 403, + ) + dataset_title_ = model.DatasetTitle.from_data(data_obj, i) + model.db.session.add(dataset_title_) + list_of_titles.append(dataset_title_.to_dict()) + + list_of_description = [] + for i in data["descriptions"]: + if "id" in i and i["id"]: + dataset_description_ = model.DatasetDescription.query.get(i["id"]) + # if dataset_description_.type == "Abstract": + # return ( + # "Abstract type can not be modified", + # 403, + # ) + dataset_description_.update(i) + list_of_description.append(dataset_description_.to_dict()) + elif "id" not in i or not i["id"]: + if i["type"] == "Abstract": + return ( + "Abstract type in description can not be given", + 403, + ) + dataset_description_ = model.DatasetDescription.from_data(data_obj, i) + model.db.session.add(dataset_description_) + list_of_description.append(dataset_description_.to_dict()) + + list_of_dates = [] + for i in data["dates"]: + if "id" in i and i["id"]: + dataset_date_ = model.DatasetDate.query.get(i["id"]) + if not dataset_date_: + return f"Study link {i['id']} Id is not found", 404 + dataset_date_.update(i) + list_of_dates.append(dataset_date_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_date_ = model.DatasetDate.from_data(data_obj, i) + model.db.session.add(dataset_date_) + list_of_dates.append(dataset_date_.to_dict()) + + model.db.session.commit() + + return ( + { + "titles": list_of_titles, + "descriptions": list_of_description, + "dates": list_of_dates, + }, + 200, + ) + + +@api.route("/study//dataset//metadata/title/") +class DatasetTitleDelete(Resource): + """Dataset Title Update Resource""" + + @api.doc("delete title") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + 
study_id: int, + dataset_id: int, # pylint: disable= unused-argument + title_id: int, + ): + """Delete dataset title""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not make any change in dataset metadata", + 403, + ) + dataset_title_ = model.DatasetTitle.query.get(title_id) + if dataset_title_.type == "MainTitle": + return ( + "Main Title type can not be deleted", + 403, + ) + model.db.session.delete(dataset_title_) + model.db.session.commit() + return Response(status=204) + + +@api.route("/study//dataset//metadata/date/") +class DatasetDateDeleteResource(Resource): + """Dataset Date Delete Resource""" + + @api.doc("delete date") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, study_id: int, dataset_id: int, date_id: int + ): # pylint: disable= unused-argument + """Delete dataset date""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + date_ = model.DatasetDate.query.get(date_id) + + model.db.session.delete(date_) + model.db.session.commit() + return Response(status=204) + + @api.route( + "/study//dataset//" + "metadata/description/" + ) + class DatasetDescriptionUpdate(Resource): + """Dataset Description Update Resource""" + + @api.doc("delete description") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + description_id: int, + ): + """Delete dataset description""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return ( + "Access denied, you can not make any change in dataset metadata", + 403, + ) + dataset_description_ = model.DatasetDescription.query.get(description_id) + if dataset_description_.type == "Abstract": + return ( + 
"Abstract description can not be deleted", + 403, + ) + model.db.session.delete(dataset_description_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_managing_organization.py b/apis/dataset_metadata/dataset_managing_organization.py deleted file mode 100644 index 2ca590ae..00000000 --- a/apis/dataset_metadata/dataset_managing_organization.py +++ /dev/null @@ -1,73 +0,0 @@ -"""API endpoints for other dataset metadata""" - -from flask import request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_managing_organization = api.model( - "DatasetManagingOrganization", - { - "name": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_scheme": fields.String(required=True), - "identifier_scheme_uri": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/managing-organization") -class DatasetManagingOrganization(Resource): - """Dataset Publisher Resource""" - - @api.doc("publisher") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_managing_organization) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset publisher metadata""" - dataset_ = model.Dataset.query.get(dataset_id) - managing_organization_ = dataset_.dataset_managing_organization - return managing_organization_.to_dict(), 200 - - @api.doc("update organization") - @api.response(200, "Success") - @api.response(400, "Validation Error") - @api.marshal_with(dataset_managing_organization) - def put(self, study_id: int, dataset_id: int): - """Update dataset managing organization metadata""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in 
dataset metadata", 403 - - schema = { - "type": "object", - "additionalProperties": False, - "properties": { - "name": {"type": "string", "minLength": 1}, - "identifier": {"type": "string"}, - "identifier_scheme": {"type": "string"}, - "identifier_scheme_uri": {"type": "string"}, - }, - "required": [ - "name", - "identifier", - "identifier_scheme", - "identifier_scheme_uri", - ], - } - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data = request.json - dataset_ = model.Dataset.query.get(dataset_id) - dataset_.dataset_managing_organization.update(data) - - model.db.session.commit() - return dataset_.dataset_managing_organization.to_dict(), 200 diff --git a/apis/dataset_metadata/dataset_other.py b/apis/dataset_metadata/dataset_other.py index 04246f97..c620d00f 100644 --- a/apis/dataset_metadata/dataset_other.py +++ b/apis/dataset_metadata/dataset_other.py @@ -1,24 +1,24 @@ """API endpoints for other dataset metadata""" from flask import request -from flask_restx import Resource +from flask_restx import Resource, fields from jsonschema import ValidationError, validate import model from apis.authentication import is_granted from apis.dataset_metadata_namespace import api -# dataset_other = api.model( -# "DatasetOther", -# { -# "language": fields.String(required=True), -# "size": fields.List(fields.String, required=True), -# "format": fields.List(fields.String, required=True), -# "standards_followed": fields.String(required=True), -# "acknowledgement": fields.String(required=True), -# "resource_type": fields.String(required=True), -# }, -# ) +dataset_other = api.model( + "DatasetOther", + { + "language": fields.String(required=False), + "size": fields.List(fields.String(required=False), required=True), + "format": fields.List(fields.String(required=False), required=True), + "standards_followed": fields.String(required=True), + "acknowledgement": fields.String(required=True), + "resource_type": 
fields.String(required=True), + }, +) @api.route("/study//dataset//metadata/other") @@ -28,7 +28,7 @@ class DatasetOtherResource(Resource): @api.doc("other") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(dataset_other) + @api.marshal_with(dataset_other) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument """Get dataset other metadata""" dataset_ = model.Dataset.query.get(dataset_id) @@ -42,7 +42,6 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume def put(self, study_id: int, dataset_id: int): """Update dataset other metadata""" study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): return "Access denied, you can not make any change in dataset metadata", 403 diff --git a/apis/dataset_metadata/dataset_related_identifier.py b/apis/dataset_metadata/dataset_related_identifier.py index 3e2ce088..cdede905 100644 --- a/apis/dataset_metadata/dataset_related_identifier.py +++ b/apis/dataset_metadata/dataset_related_identifier.py @@ -32,7 +32,7 @@ class DatasetRelatedIdentifierResource(Resource): @api.doc("related identifier") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(dataset_related_identifier) + @api.marshal_with(dataset_related_identifier) def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument """Get dataset related identifier""" dataset_ = model.Dataset.query.get(dataset_id) @@ -42,6 +42,7 @@ def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argume @api.doc("update related identifier") @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(dataset_related_identifier) def post(self, study_id: int, dataset_id: int): """Update dataset related identifier""" study_obj = model.Study.query.get(study_id) diff --git a/apis/dataset_metadata/dataset_rights.py b/apis/dataset_metadata/dataset_rights.py 
deleted file mode 100644 index 9f835def..00000000 --- a/apis/dataset_metadata/dataset_rights.py +++ /dev/null @@ -1,122 +0,0 @@ -"""API endpoints for dataset rights""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_rights = api.model( - "DatasetRights", - { - "id": fields.String(required=True), - "rights": fields.String(required=True), - "uri": fields.String(required=True), - "identifier": fields.String(required=True), - "identifier_scheme": fields.String(required=True), - "identifier_scheme_uri": fields.String(required=True), - "license_text": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/rights") -class DatasetRightsResource(Resource): - """Dataset Rights Resource""" - - @api.doc("rights") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_rights) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset rights""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_rights_ = dataset_.dataset_rights - return [d.to_dict() for d in dataset_rights_], 200 - - @api.doc("update rights") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset rights""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "identifier": {"type": "string"}, - "identifier_scheme": {"type": "string"}, - "identifier_scheme_uri": 
{"type": "string"}, - "rights": {"type": "string", "minLength": 1}, - "uri": {"type": "string"}, - "license_text": {"type": "string"}, - }, - "required": [ - "identifier", - "identifier_scheme", - "rights", - "uri", - "license_text", - ], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_rights_ = model.DatasetRights.query.get(i["id"]) - if not dataset_rights_: - return f"Study link {i['id']} Id is not found", 404 - dataset_rights_.update(i) - list_of_elements.append(dataset_rights_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_rights_ = model.DatasetRights.from_data(data_obj, i) - model.db.session.add(dataset_rights_) - list_of_elements.append(dataset_rights_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/rights/") -class DatasetRightsUpdate(Resource): - """Dataset Rights Update Resource""" - - @api.doc("delete rights") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - rights_id: int, - ): - """Delete dataset rights""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - dataset_rights_ = model.DatasetRights.query.get(rights_id) - - model.db.session.delete(dataset_rights_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_subject.py b/apis/dataset_metadata/dataset_subject.py deleted file mode 100644 index 6aa372cc..00000000 --- a/apis/dataset_metadata/dataset_subject.py +++ /dev/null @@ -1,120 +0,0 @@ -"""API endpoints for dataset 
subject""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_subject = api.model( - "DatasetSubject", - { - "id": fields.String(required=True), - "subject": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "value_uri": fields.String(required=True), - "classification_code": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/subject") -class DatasetSubjectResource(Resource): - """Dataset Subject Resource""" - - @api.doc("subject") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # @api.param("id", "The dataset identifier") - @api.marshal_with(dataset_subject) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset subject""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_subject_ = dataset_.dataset_subject - return [d.to_dict() for d in dataset_subject_], 200 - - @api.doc("update subject") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset subject""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can't modify dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "classification_code": {"type": "string"}, - "scheme": {"type": "string"}, - "scheme_uri": {"type": "string"}, - "subject": {"type": "string", "minLength": 1}, - "value_uri": {"type": "string"}, - }, - "required": [ - "subject", - "scheme", - "scheme_uri", - "value_uri", - "classification_code", - ], - }, - 
"uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_subject_ = model.DatasetSubject.query.get(i["id"]) - if not dataset_subject_: - return f"Study link {i['id']} Id is not found", 404 - dataset_subject_.update(i) - list_of_elements.append(dataset_subject_.to_dict()) - elif "id" not in i or not i["id"]: - dataset_subject_ = model.DatasetSubject.from_data(data_obj, i) - model.db.session.add(dataset_subject_) - list_of_elements.append(dataset_subject_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - -@api.route("/study//dataset//metadata/subject/") -class DatasetSubjectUpdate(Resource): - """Dataset Subject Update Resource""" - - @api.doc("delete subject") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, # pylint: disable= unused-argument - dataset_id: int, # pylint: disable= unused-argument - subject_id: int, - ): - """Delete dataset subject""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can't make change in dataset metadata", 403 - dataset_subject_ = model.DatasetSubject.query.get(subject_id) - - model.db.session.delete(dataset_subject_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/dataset_metadata/dataset_team.py b/apis/dataset_metadata/dataset_team.py new file mode 100644 index 00000000..d17aaeda --- /dev/null +++ b/apis/dataset_metadata/dataset_team.py @@ -0,0 +1,403 @@ +"""API for dataset contributor metadata""" + +from typing import Any, Union + +from flask import Response, request +from flask_restx import Resource, fields +from jsonschema import ValidationError, validate + +import model +from 
apis.authentication import is_granted +from apis.dataset_metadata_namespace import api + +dataset_team = api.model( + "DatasetTeam", + { + "contributors": fields.List( + fields.Nested( + api.model( + "Contributor", + { + "id": fields.String(required=True), + "family_name": fields.String(), + "given_name": fields.String(required=True), + "name_type": fields.String(), + "name_identifier": fields.String(required=True), + "name_identifier_scheme": fields.String(required=True), + "name_identifier_scheme_uri": fields.String(required=True), + "creator": fields.Boolean(required=True), + "contributor_type": fields.String(), + "affiliations": fields.Raw(required=True), + "created_at": fields.Integer(required=True), + }, + ) + ) + ), + "creators": fields.List( + fields.Nested( + api.model( + "Creator", + { + "id": fields.String(required=True), + "family_name": fields.String(), + "given_name": fields.String(required=True), + "name_type": fields.String(), + "name_identifier": fields.String(required=True), + "name_identifier_scheme": fields.String(required=True), + "name_identifier_scheme_uri": fields.String(required=True), + "creator": fields.Boolean(required=True), + "contributor_type": fields.String(), + "affiliations": fields.Raw(required=True), + "created_at": fields.Integer(required=True), + }, + ) + ) + ), + "managing_organization": fields.Nested( + api.model( + "DatasetManagingOrganization", + { + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_scheme": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + }, + ) + ), + "funders": fields.List( + fields.Nested( + api.model( + "Funders", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "identifier_scheme_uri": fields.String(required=True), + "award_number": fields.String(required=True), + "award_uri": 
fields.String(required=True), + "award_title": fields.String(required=True), + }, + ) + ) + ), + }, +) + + +@api.route("/study//dataset//metadata/team") +class DatasetTeamResource(Resource): + """Dataset Team Resource""" + + @api.doc("team") + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_team) + def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument + """Get dataset creator""" + dataset_ = model.Dataset.query.get(dataset_id) + dataset_creator_ = dataset_.dataset_contributors + dataset_contributor_ = dataset_.dataset_contributors + dataset_funder_ = dataset_.dataset_funder + managing_organization_ = dataset_.dataset_managing_organization + return { + "creators": [ + d.to_dict() for d in dataset_creator_ if d.to_dict()["creator"] + ], + "contributors": [ + d.to_dict() for d in dataset_contributor_ if not d.to_dict()["creator"] + ], + "managing_organization": managing_organization_.to_dict(), + "funders": [d.to_dict() for d in dataset_funder_], + }, 200 + + @api.doc("update team") + @api.response(201, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(dataset_team) + def post(self, study_id: int, dataset_id: int): + """Update dataset team""" + study_obj = model.Study.query.get(study_id) + + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + + schema = { + "type": "object", + "additionalProperties": False, + "properties": { + "creators": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "given_name": {"type": "string", "minLength": 1}, + "family_name": {"type": ["string", "null"]}, + "name_identifier": {"type": "string", "minLength": 1}, + "name_identifier_scheme": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme_uri": {"type": "string"}, + "name_type": { + "type": "string", + "enum": 
["Personal", "Organizational"], + "minLength": 1, + }, + "affiliations": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": {"type": "string"}, + "identifier": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + }, + }, + "uniqueItems": True, + }, + }, + "required": [ + "name_type", + "given_name", + "affiliations", + "name_identifier", + "name_identifier_scheme", + ], + }, + }, + "contributors": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "contributor_type": {"type": "string", "minLength": 1}, + "given_name": {"type": "string", "minLength": 1}, + "family_name": {"type": ["string", "null"]}, + "name_identifier": {"type": "string", "minLength": 1}, + "name_identifier_scheme": { + "type": "string", + "minLength": 1, + }, + "name_identifier_scheme_uri": {"type": "string"}, + "name_type": { + "type": "string", + "enum": ["Personal", "Organizational"], + "minLength": 1, + }, + "affiliations": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": {"type": "string"}, + "identifier": {"type": "string"}, + "scheme": {"type": "string"}, + "scheme_uri": {"type": "string"}, + }, + }, + "uniqueItems": True, + }, + }, + "required": [ + "contributor_type", + "name_type", + "given_name", + "affiliations", + "name_identifier", + "name_identifier_scheme", + ], + }, + }, + "funders": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": False, + "properties": { + "id": {"type": "string"}, + "name": {"type": "string", "minLength": 1}, + "award_number": {"type": "string", "minLength": 1}, + "award_title": {"type": "string"}, + "award_uri": {"type": "string"}, + "identifier": {"type": "string", "minLength": 1}, + "identifier_scheme_uri": {"type": "string"}, + "identifier_type": {"type": ["string", "null"]}, + }, 
+ "required": [ + "name", + "award_number", + "award_title", + "award_uri", + "identifier", + "identifier_scheme_uri", + "identifier_type", + ], + }, + "uniqueItems": True, + }, + "managing_organization": { + "type": "object", + "additionalProperties": False, + "properties": { + "name": {"type": "string", "minLength": 1}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + }, + "required": [ + "name", + "identifier", + "identifier_scheme", + "identifier_scheme_uri", + ], + }, + }, + "required": [ + "creators", + "contributors", + "funders", + "managing_organization", + ], + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 + + data: Union[Any, dict] = request.json + data_obj = model.Dataset.query.get(dataset_id) + + list_of_creator = [] + for i in data["creators"]: + i["creator"] = True + if "id" in i and i["id"]: + i["contributor_type"] = None + dataset_creator_ = model.DatasetContributor.query.get(i["id"]) + if not dataset_creator_: + return f"Study link {i['id']} Id is not found", 404 + dataset_creator_.update(i) + list_of_creator.append(dataset_creator_.to_dict()) + elif "id" not in i or not i["id"]: + i["contributor_type"] = None + dataset_creator_ = model.DatasetContributor.from_data(data_obj, i) + model.db.session.add(dataset_creator_) + list_of_creator.append(dataset_creator_.to_dict()) + + list_of_contributors = [] + for i in data["contributors"]: + i["creator"] = False + if "id" in i and i["id"]: + dataset_contributor_ = model.DatasetContributor.query.get(i["id"]) + if not dataset_contributor_: + return f"Study link {i['id']} Id is not found", 404 + dataset_contributor_.update(i) + list_of_contributors.append(dataset_contributor_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_contributor_ = model.DatasetContributor.from_data(data_obj, i) + model.db.session.add(dataset_contributor_) + 
list_of_contributors.append(dataset_contributor_.to_dict()) + + list_of_funders = [] + for i in data["funders"]: + if "id" in i and i["id"]: + dataset_funder_ = model.DatasetFunder.query.get(i["id"]) + if not dataset_funder_: + return f"Study link {i['id']} Id is not found", 404 + dataset_funder_.update(i) + list_of_funders.append(dataset_funder_.to_dict()) + elif "id" not in i or not i["id"]: + dataset_funder_ = model.DatasetFunder.from_data(data_obj, i) + model.db.session.add(dataset_funder_) + list_of_funders.append(dataset_funder_.to_dict()) + + data_obj.dataset_managing_organization.update(data["managing_organization"]) + model.db.session.commit() + return { + "creators": list_of_creator, + "contributors": list_of_contributors, + "managing_organization": data_obj.dataset_managing_organization.to_dict(), + "funders": list_of_funders, + }, 200 + + +@api.route( + "/study//dataset//metadata/contributor/" +) +class DatasetContributorDelete(Resource): + """Dataset Contributor Delete Resource""" + + @api.doc("delete contributor") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + contributor_id: int, + ): + """Delete dataset contributor""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + contributor_ = model.DatasetContributor.query.get(contributor_id) + + model.db.session.delete(contributor_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//dataset//metadata/creator/") +class DatasetCreatorDelete(Resource): + @api.doc("delete creator") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + creator_id: int, + ): + """Delete dataset creator""" + study_obj = 
model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_creator_ = model.DatasetContributor.query.get(creator_id) + model.db.session.delete(dataset_creator_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//dataset//metadata/funder/") +class DatasetFunderUpdate(Resource): + """Dataset Funder Update Resource""" + + @api.doc("delete funder") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete( + self, + study_id: int, + dataset_id: int, # pylint: disable= unused-argument + funder_id: int, + ): + """Delete dataset funder""" + study_obj = model.Study.query.get(study_id) + if not is_granted("dataset_metadata", study_obj): + return "Access denied, you can not make any change in dataset metadata", 403 + dataset_funder_ = model.DatasetFunder.query.get(funder_id) + + model.db.session.delete(dataset_funder_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/dataset_metadata/dataset_title.py b/apis/dataset_metadata/dataset_title.py deleted file mode 100644 index b9da2382..00000000 --- a/apis/dataset_metadata/dataset_title.py +++ /dev/null @@ -1,128 +0,0 @@ -"""API for dataset title metadata""" - -from typing import Any, Union - -from flask import Response, request -from flask_restx import Resource, fields -from jsonschema import ValidationError, validate - -import model -from apis.authentication import is_granted -from apis.dataset_metadata_namespace import api - -dataset_title = api.model( - "DatasetTitle", - { - "id": fields.String(required=True), - "title": fields.String(required=True), - "type": fields.String(required=True), - }, -) - - -@api.route("/study//dataset//metadata/title") -class DatasetTitleResource(Resource): - """Dataset Title Resource""" - - @api.doc("title") - @api.response(200, "Success") - @api.response(400, "Validation Error") - # 
@api.param("id", "The dataset identifier") - @api.marshal_with(dataset_title) - def get(self, study_id: int, dataset_id: int): # pylint: disable= unused-argument - """Get dataset title""" - dataset_ = model.Dataset.query.get(dataset_id) - dataset_title_ = dataset_.dataset_title - return [d.to_dict() for d in dataset_title_], 200 - - @api.doc("update title") - @api.response(201, "Success") - @api.response(400, "Validation Error") - def post(self, study_id: int, dataset_id: int): - """Update dataset title""" - study_obj = model.Study.query.get(study_id) - - if not is_granted("dataset_metadata", study_obj): - return "Access denied, you can not make any change in dataset metadata", 403 - - schema = { - "type": "array", - "items": { - "type": "object", - "additionalProperties": False, - "properties": { - "id": {"type": "string"}, - "title": { - "type": "string", - "minLength": 1, - }, - "type": { - "type": "string", - "enum": [ - "MainTitle", - "AlternativeTitle", - "Subtitle", - "TranslatedTitle", - "OtherTitle", - ], - }, - }, - "required": ["title", "type"], - }, - "uniqueItems": True, - } - - try: - validate(instance=request.json, schema=schema) - except ValidationError as err: - return err.message, 400 - - data: Union[Any, dict] = request.json - data_obj = model.Dataset.query.get(dataset_id) - list_of_elements = [] - for i in data: - if "id" in i and i["id"]: - dataset_title_ = model.DatasetTitle.query.get(i["id"]) - dataset_title_.update(i) - list_of_elements.append(dataset_title_.to_dict()) - elif "id" not in i or not i["id"]: - if i["type"] == "MainTitle": - return ( - "Main Title type can not be given", - 403, - ) - dataset_title_ = model.DatasetTitle.from_data(data_obj, i) - model.db.session.add(dataset_title_) - list_of_elements.append(dataset_title_.to_dict()) - model.db.session.commit() - return list_of_elements, 201 - - @api.route("/study//dataset//metadata/title/") - class DatasetTitleDelete(Resource): - """Dataset Title Update Resource""" - - 
@api.doc("delete title") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete( - self, - study_id: int, - dataset_id: int, # pylint: disable= unused-argument - title_id: int, - ): - """Delete dataset title""" - study_obj = model.Study.query.get(study_id) - if not is_granted("dataset_metadata", study_obj): - return ( - "Access denied, you can not make any change in dataset metadata", - 403, - ) - dataset_title_ = model.DatasetTitle.query.get(title_id) - if dataset_title_.type == "MainTitle": - return ( - "Main Title type can not be deleted", - 403, - ) - model.db.session.delete(dataset_title_) - model.db.session.commit() - return Response(status=204) diff --git a/tests/functional/test_040_study_dataset_metadata_api.py b/tests/functional/test_040_study_dataset_metadata_api.py index e6a9d927..a309b84b 100644 --- a/tests/functional/test_040_study_dataset_metadata_api.py +++ b/tests/functional/test_040_study_dataset_metadata_api.py @@ -6,85 +6,162 @@ import pytest -# ------------------- ACCESS METADATA ------------------- # -def test_put_dataset_access_metadata(clients): +# ------------------- ACCESS-RIGHTS METADATA ------------------- # +def test_post_dataset_access_rights_metadata(clients): """ Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (PUT) + When the '/study/{study_id}/dataset/{dataset_id}/metadata/access-rights' endpoint is requested (PUT) Then check that the response is valid and updates the dataset access metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", json={ - "type": "type", - 
"description": "description", - "url": "google.com", - "url_last_checked": 123, + "access": { + "type": "type", + "description": "description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], }, ) response_data = json.loads(response.data) assert response.status_code == 200 + pytest.global_dataset_rights_id = response_data["rights"][0]["id"] - assert response_data["type"] == "type" - assert response_data["description"] == "description" - assert response_data["url"] == "google.com" - assert response_data["url_last_checked"] == 123 + assert response_data["access"]["type"] == "type" + assert response_data["access"]["description"] == "description" + assert response_data["access"]["url"] == "google.com" + assert response_data["access"]["url_last_checked"] == 123 - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + assert response_data["rights"][0]["identifier"] == "Identifier" + assert response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][0]["rights"] == "Rights" + assert response_data["rights"][0]["uri"] == "URI" + assert response_data["rights"][0]["license_text"] == "license text" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", json={ - "type": "admin type", - "description": "admin description", - "url": "google.com", - "url_last_checked": 123, + "access": { + "type": "admin type", + "description": "admin description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Admin Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier 
Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], }, ) assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) + pytest.global_dataset_rights_id_admin = admin_response_data["rights"][0]["id"] - assert admin_response_data["type"] == "admin type" - assert admin_response_data["description"] == "admin description" - assert admin_response_data["url"] == "google.com" - assert admin_response_data["url_last_checked"] == 123 + assert admin_response_data["access"]["type"] == "admin type" + assert admin_response_data["access"]["description"] == "admin description" + assert admin_response_data["access"]["url"] == "google.com" + assert admin_response_data["access"]["url_last_checked"] == 123 - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + assert admin_response_data["rights"][0]["identifier"] == "Admin Identifier" + assert admin_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + admin_response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" + ) + assert admin_response_data["rights"][0]["rights"] == "Rights" + assert admin_response_data["rights"][0]["uri"] == "URI" + assert admin_response_data["rights"][0]["license_text"] == "license text" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", json={ - "type": "editor type", - "description": "editor description", - "url": "google.com", - "url_last_checked": 123, + "access": { + "type": "editor type", + "description": "editor description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Editor Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], }, ) assert editor_response.status_code == 200 editor_response_data = 
json.loads(editor_response.data) + pytest.global_dataset_rights_id_editor = editor_response_data["rights"][0]["id"] - assert editor_response_data["type"] == "editor type" - assert editor_response_data["description"] == "editor description" - assert editor_response_data["url"] == "google.com" - assert editor_response_data["url_last_checked"] == 123 + assert editor_response_data["access"]["type"] == "editor type" + assert editor_response_data["access"]["description"] == "editor description" + assert editor_response_data["access"]["url"] == "google.com" + assert editor_response_data["access"]["url_last_checked"] == 123 - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access", + assert editor_response_data["rights"][0]["identifier"] == "Editor Identifier" + assert editor_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][0]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert editor_response_data["rights"][0]["rights"] == "Rights" + assert editor_response_data["rights"][0]["uri"] == "URI" + assert editor_response_data["rights"][0]["license_text"] == "license text" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", json={ - "type": "viewer type", - "description": "viewer description", - "url": "google.com", - "url_last_checked": 123, + "access": { + "type": "viewer type", + "description": "viewer description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Viewer Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], }, ) assert viewer_response.status_code == 403 -def test_get_dataset_access_metadata(clients): +def test_get_dataset_access_rights_metadata(clients): """ Given a Flask application configured for testing and a 
study ID When the '/study/{study_id}/dataset/{dataset_id}/metadata/access' endpoint is requested (GET) @@ -95,16 +172,16 @@ def test_get_dataset_access_metadata(clients): dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" ) admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" ) editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" ) viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/access" + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights" ) assert response.status_code == 200 @@ -118,312 +195,270 @@ def test_get_dataset_access_metadata(clients): viewer_response_data = json.loads(viewer_response.data) # Since editor was the last successful PUT request, the response data should match - assert response_data["type"] == "editor type" - assert response_data["description"] == "editor description" - assert response_data["url"] == "google.com" - assert response_data["url_last_checked"] == 123 - - assert admin_response_data["type"] == "editor type" - assert admin_response_data["description"] == "editor description" - assert admin_response_data["url"] == "google.com" - assert admin_response_data["url_last_checked"] == 123 - - assert editor_response_data["type"] == "editor type" - assert editor_response_data["description"] == "editor description" - assert editor_response_data["url"] == "google.com" - assert editor_response_data["url_last_checked"] == 123 - - assert viewer_response_data["type"] == "editor type" - assert viewer_response_data["description"] == "editor description" - assert viewer_response_data["url"] == "google.com" - assert 
viewer_response_data["url_last_checked"] == 123 - - -# ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # -def test_post_alternative_identifier(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset alternative identifier - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "identifier test", - "type": "ARK", - } - ], + assert response_data["access"]["type"] == "editor type" + assert response_data["access"]["description"] == "editor description" + assert response_data["access"]["url"] == "google.com" + assert response_data["access"]["url_last_checked"] == 123 + ### + assert response_data["rights"][0]["identifier"] == "Identifier" + assert response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][0]["rights"] == "Rights" + assert response_data["rights"][0]["uri"] == "URI" + assert response_data["rights"][0]["license_text"] == "license text" + + assert admin_response_data["rights"][0]["identifier"] == "Identifier" + assert admin_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + admin_response_data["rights"][0]["identifier_scheme_uri"] == "Identifier Scheme" ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_alternative_identifier_id = response_data[0]["id"] - - assert 
response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ARK" + assert admin_response_data["rights"][0]["rights"] == "Rights" + assert admin_response_data["rights"][0]["uri"] == "URI" + assert admin_response_data["rights"][0]["license_text"] == "license text" - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "admin test", - "type": "ARK", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "editor test", - "type": "ARK", - } - ], - ) - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", - json=[ - { - "identifier": "viewer test", - "type": "ARK", - } - ], + assert editor_response_data["rights"][0]["identifier"] == "Identifier" + assert editor_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][0]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert editor_response_data["rights"][0]["rights"] == "Rights" + assert editor_response_data["rights"][0]["uri"] == "URI" + assert editor_response_data["rights"][0]["license_text"] == "license text" + + assert response_data["rights"][1]["identifier"] == "Admin Identifier" + assert response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][1]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][1]["rights"] == "Rights" + assert response_data["rights"][1]["uri"] == "URI" + assert response_data["rights"][1]["license_text"] == "license text" + + assert admin_response_data["rights"][1]["identifier"] == "Admin Identifier" + assert admin_response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert ( + 
admin_response_data["rights"][1]["identifier_scheme_uri"] == "Identifier Scheme" ) + assert admin_response_data["rights"][1]["rights"] == "Rights" + assert admin_response_data["rights"][1]["uri"] == "URI" + assert admin_response_data["rights"][1]["license_text"] == "license text" - assert admin_response.status_code == 201 - assert editor_response.status_code == 201 - assert viewer_response.status_code == 403 - - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - pytest.global_alternative_identifier_id_admin = admin_response_data[0]["id"] - pytest.global_alternative_identifier_id_editor = editor_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == "admin test" - assert admin_response_data[0]["type"] == "ARK" - assert editor_response_data[0]["identifier"] == "editor test" - assert editor_response_data[0]["type"] == "ARK" - - -def test_get_alternative_identifier(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset alternative identifier content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" + assert editor_response_data["rights"][1]["identifier"] == "Admin Identifier" + assert 
editor_response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][1]["identifier_scheme_uri"] + == "Identifier Scheme" + ) + assert editor_response_data["rights"][1]["rights"] == "Rights" + assert editor_response_data["rights"][1]["uri"] == "URI" + assert editor_response_data["rights"][1]["license_text"] == "license text" + + assert response_data["rights"][2]["identifier"] == "Editor Identifier" + assert response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert response_data["rights"][2]["identifier_scheme_uri"] == "Identifier Scheme" + assert response_data["rights"][2]["rights"] == "Rights" + assert response_data["rights"][2]["uri"] == "URI" + assert response_data["rights"][2]["license_text"] == "license text" + + assert admin_response_data["rights"][2]["identifier"] == "Editor Identifier" + assert admin_response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert ( + admin_response_data["rights"][2]["identifier_scheme_uri"] == "Identifier Scheme" ) + assert admin_response_data["rights"][2]["rights"] == "Rights" + assert admin_response_data["rights"][2]["uri"] == "URI" + assert admin_response_data["rights"][2]["license_text"] == "license text" - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data[0]["identifier"] == "identifier test" - assert response_data[0]["type"] == "ARK" - assert response_data[1]["identifier"] == "admin test" - assert response_data[1]["type"] == "ARK" - assert response_data[2]["identifier"] == "editor test" - assert response_data[2]["type"] == "ARK" - - assert admin_response_data[0]["identifier"] 
== "identifier test" - assert admin_response_data[0]["type"] == "ARK" - assert admin_response_data[1]["identifier"] == "admin test" - assert admin_response_data[1]["type"] == "ARK" - assert admin_response_data[2]["identifier"] == "editor test" - assert admin_response_data[2]["type"] == "ARK" - - assert editor_response_data[0]["identifier"] == "identifier test" - assert editor_response_data[0]["type"] == "ARK" - assert editor_response_data[1]["identifier"] == "admin test" - assert editor_response_data[1]["type"] == "ARK" - assert editor_response_data[2]["identifier"] == "editor test" - assert editor_response_data[2]["type"] == "ARK" - - assert viewer_response_data[0]["identifier"] == "identifier test" - assert viewer_response_data[0]["type"] == "ARK" - assert viewer_response_data[1]["identifier"] == "admin test" - assert viewer_response_data[1]["type"] == "ARK" - assert viewer_response_data[2]["identifier"] == "editor test" - assert viewer_response_data[2]["type"] == "ARK" - - -def test_delete_alternative_identifier(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset alternative identifier content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - identifier_id = pytest.global_alternative_identifier_id - admin_identifier_id = pytest.global_alternative_identifier_id_admin - editor_identifier_id = pytest.global_alternative_identifier_id_editor - - # verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + assert editor_response_data["rights"][2]["identifier"] == "Editor Identifier" + assert 
editor_response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert ( + editor_response_data["rights"][2]["identifier_scheme_uri"] + == "Identifier Scheme" ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" + assert editor_response_data["rights"][2]["rights"] == "Rights" + assert editor_response_data["rights"][2]["uri"] == "URI" + assert editor_response_data["rights"][2]["license_text"] == "license text" + + assert viewer_response_data["rights"][0]["identifier"] == "Identifier" + assert viewer_response_data["rights"][0]["identifier_scheme"] == "Identifier Scheme" + assert ( + viewer_response_data["rights"][0]["identifier_scheme_uri"] + == "Identifier Scheme" ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{admin_identifier_id}" + assert viewer_response_data["rights"][0]["rights"] == "Rights" + assert viewer_response_data["rights"][0]["uri"] == "URI" + assert viewer_response_data["rights"][0]["license_text"] == "license text" + + assert viewer_response_data["rights"][1]["identifier"] == "Admin Identifier" + assert viewer_response_data["rights"][1]["identifier_scheme"] == "Identifier Scheme" + assert ( + viewer_response_data["rights"][1]["identifier_scheme_uri"] + == "Identifier Scheme" ) - # pylint: disable=line-too-long - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{editor_identifier_id}" + assert viewer_response_data["rights"][1]["rights"] == "Rights" + assert viewer_response_data["rights"][1]["uri"] == "URI" + assert viewer_response_data["rights"][1]["license_text"] == "license text" + + assert viewer_response_data["rights"][2]["identifier"] == "Editor Identifier" + assert viewer_response_data["rights"][2]["identifier_scheme"] == "Identifier Scheme" + assert ( + 
viewer_response_data["rights"][2]["identifier_scheme_uri"] + == "Identifier Scheme" ) + assert viewer_response_data["rights"][2]["rights"] == "Rights" + assert viewer_response_data["rights"][2]["uri"] == "URI" + assert viewer_response_data["rights"][2]["license_text"] == "license text" + ####### + assert admin_response_data["access"]["type"] == "editor type" + assert admin_response_data["access"]["description"] == "editor description" + assert admin_response_data["access"]["url"] == "google.com" + assert admin_response_data["access"]["url_last_checked"] == 123 - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + assert editor_response_data["access"]["type"] == "editor type" + assert editor_response_data["access"]["description"] == "editor description" + assert editor_response_data["access"]["url"] == "google.com" + assert editor_response_data["access"]["url_last_checked"] == 123 + assert viewer_response_data["access"]["type"] == "editor type" + assert viewer_response_data["access"]["description"] == "editor description" + assert viewer_response_data["access"]["url"] == "google.com" + assert viewer_response_data["access"]["url_last_checked"] == 123 -# ------------------- CONSENT METADATA ------------------- # -def test_put_dataset_consent_metadata(clients): + +# ------------------- GENERAL INFORMATION METADATA ------------------- # +def test_post_dataset_general_information_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (PUT) - Then check that the response is valid and updates the dataset consent metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/general-description' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + general information 
metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", json={ - "type": "test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "test", + "titles": [{"title": "Owner Title", "type": "Subtitle"}], + "descriptions": [{"description": "Owner Description", "type": "Methods"}], + "dates": [{"date": 20210101, "type": "Accepted", "information": "Info"}], }, ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["type"] == "test" - assert response_data["noncommercial"] is True - assert response_data["geog_restrict"] is True - assert response_data["research_type"] is True - assert response_data["genetic_only"] is True - assert response_data["no_methods"] is True - assert response_data["details"] == "test" + pytest.global_dataset_title_id = response_data["titles"][0]["id"] + pytest.global_dataset_description_id = response_data["descriptions"][0]["id"] + pytest.global_dataset_date_id = response_data["dates"][0]["id"] - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + assert response_data["titles"][0]["title"] == "Owner Title" + assert response_data["titles"][0]["type"] == "Subtitle" + assert response_data["descriptions"][0]["description"] == "Owner Description" + assert response_data["descriptions"][0]["type"] == "Methods" + + assert response_data["dates"][0]["date"] == 20210101 + assert response_data["dates"][0]["type"] == "Accepted" + assert response_data["dates"][0]["information"] == 
"Info" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", json={ - "type": "admin test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "admin details test", + "titles": [{"title": "Admin Title", "type": "Subtitle"}], + "descriptions": [{"description": "Admin Description", "type": "Methods"}], + "dates": [{"date": 20210102, "type": "Accepted", "information": "Info"}], }, ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + # assert admin_response.status_code == 200 - assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - assert admin_response_data["type"] == "admin test" - assert admin_response_data["details"] == "admin details test" + pytest.global_dataset_title_id_admin = admin_response_data["titles"][0]["id"] + pytest.global_dataset_description_id_admin = admin_response_data["descriptions"][0][ + "id" + ] + pytest.global_dataset_date_id_admin = admin_response_data["dates"][0]["id"] - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + assert admin_response_data["titles"][0]["title"] == "Admin Title" + assert admin_response_data["titles"][0]["type"] == "Subtitle" + assert admin_response_data["descriptions"][0]["description"] == "Admin Description" + assert admin_response_data["descriptions"][0]["type"] == "Methods" + + assert admin_response_data["dates"][0]["date"] == 20210102 + assert admin_response_data["dates"][0]["type"] == "Accepted" + assert admin_response_data["dates"][0]["information"] == "Info" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", json={ - "type": "editor test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "editor details 
test", + "titles": [{"title": "Editor Title", "type": "Subtitle"}], + "descriptions": [{"description": "Editor Description", "type": "Methods"}], + "dates": [{"date": 20210103, "type": "Accepted", "information": "Info"}], }, ) assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) + pytest.global_dataset_title_id_editor = editor_response_data["titles"][0]["id"] + pytest.global_dataset_description_id_editor = editor_response_data["descriptions"][ + 0 + ]["id"] + pytest.global_dataset_date_id_editor = editor_response_data["dates"][0]["id"] - assert editor_response_data["type"] == "editor test" - assert editor_response_data["details"] == "editor details test" + assert editor_response_data["titles"][0]["title"] == "Editor Title" + assert editor_response_data["titles"][0]["type"] == "Subtitle" - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent", + assert ( + editor_response_data["descriptions"][0]["description"] == "Editor Description" + ) + assert editor_response_data["descriptions"][0]["type"] == "Methods" + + assert editor_response_data["dates"][0]["date"] == 20210103 + assert editor_response_data["dates"][0]["type"] == "Accepted" + assert editor_response_data["dates"][0]["information"] == "Info" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", json={ - "type": "viewer test", - "noncommercial": True, - "geog_restrict": True, - "research_type": True, - "genetic_only": True, - "no_methods": True, - "details": "viewer details test", + "titles": [{"title": "Viewer Title", "type": "Subtitle"}], + "descriptions": [{"description": "Viewer Description", "type": "Methods"}], + "dates": [{"date": 20210103, "type": "Accepted", "information": "Info"}], }, ) assert viewer_response.status_code == 403 -def test_get_dataset_consent_metadata(clients): +# ------------------- DELETE GENERAL INFORMATION METADATA 
------------------- # +def test_get_dataset_general_information_metadata(clients): """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/consent' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset consent metadata content + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/general-information' + endpoint is requested (GET) + Then check that the response is valid and retrieves the dataset + general information metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" ) admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" ) editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" ) viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/consent" + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information" ) assert response.status_code == 200 @@ -436,1066 +471,205 @@ def test_get_dataset_consent_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - # Editor was the last successful PUT request, so the response data should match - assert response_data["type"] == "editor test" - assert response_data["noncommercial"] is True - assert response_data["geog_restrict"] is True - assert response_data["research_type"] is True - 
assert response_data["genetic_only"] is True - assert response_data["no_methods"] is True - assert response_data["details"] == "editor details test" - - assert admin_response_data["type"] == "editor test" - assert admin_response_data["noncommercial"] is True - assert admin_response_data["geog_restrict"] is True - assert admin_response_data["research_type"] is True - assert admin_response_data["genetic_only"] is True - assert admin_response_data["no_methods"] is True - assert admin_response_data["details"] == "editor details test" - - assert editor_response_data["type"] == "editor test" - assert editor_response_data["noncommercial"] is True - assert editor_response_data["geog_restrict"] is True - assert editor_response_data["research_type"] is True - assert editor_response_data["genetic_only"] is True - assert editor_response_data["no_methods"] is True - assert editor_response_data["details"] == "editor details test" - - assert viewer_response_data["type"] == "editor test" - assert viewer_response_data["noncommercial"] is True - assert viewer_response_data["geog_restrict"] is True - assert viewer_response_data["research_type"] is True - assert viewer_response_data["genetic_only"] is True - assert viewer_response_data["no_methods"] is True - assert viewer_response_data["details"] == "editor details test" - - -# ------------------- CONTRIBUTOR METADATA ------------------- # -def test_post_dataset_contributor_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset contributor metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - # Add a one second delay to prevent duplicate timestamps - sleep(1) - response_data = json.loads(response.data) + owner_titles = response_data["titles"] + owner_descriptions = response_data["descriptions"] + owner_dates = response_data["dates"] - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_contributor_id = response_data[0]["id"] - - assert response_data[0]["given_name"] == "Given Name here" - assert response_data[0]["family_name"] == "Family Name here" - assert response_data[0]["name_type"] == "Personal" - assert response_data[0]["name_identifier"] == "Name identifier" - assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[0]["creator"] is False - assert response_data[0]["contributor_type"] == "Con Type" - assert response_data[0]["affiliations"][0]["name"] == "Test" - assert response_data[0]["affiliations"][0]["identifier"] == "yes" - assert response_data[0]["affiliations"][0]["scheme"] == "uh" - assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + admin_titles = admin_response_data["titles"] + admin_descriptions = admin_response_data["descriptions"] + admin_dates = admin_response_data["dates"] - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Admin Given Name here", - "family_name": "Family Name here", - 
"name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) + editor_titles = editor_response_data["titles"] + editor_descriptions = editor_response_data["descriptions"] + editor_dates = editor_response_data["dates"] - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_contributor_id_admin = admin_response_data[0]["id"] + viewer_titles = viewer_response_data["titles"] + viewer_descriptions = viewer_response_data["descriptions"] + viewer_dates = viewer_response_data["dates"] - assert admin_response_data[0]["given_name"] == "Admin Given Name here" + assert len(owner_titles) == 4 + assert len(admin_titles) == 4 + assert len(editor_titles) == 4 + assert len(viewer_titles) == 4 - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Editor Given Name here", - "family_name": "Editor Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_contributor_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["given_name"] == "Editor Given Name here" - - viewer_response = _viewer_client.post( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Viewer Given Name here", - "family_name": "Viewer Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_contributor_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset contributor metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - -def test_delete_dataset_contributor_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset contributor metadata content - """ - 
_logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - contributor_id = pytest.global_dataset_contributor_id - admin_contributor_id = pytest.global_dataset_contributor_id_admin - editor_contributor_id = pytest.global_dataset_contributor_id_editor - - # Verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" - ) - # pylint: disable=line-too-long - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{admin_contributor_id}" - ) + # search for maintitle index # pylint: disable=line-too-long - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{editor_contributor_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- CREATOR METADATA ------------------- # -def test_post_dataset_creator_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (POST) - Then check that the response is valid and creates the dataset creator metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - 
"name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_creator_id = response_data[0]["id"] - - assert response_data[0]["given_name"] == "Given Name here" - assert response_data[0]["family_name"] == "Family Name here" - assert response_data[0]["name_type"] == "Personal" - assert response_data[0]["name_identifier"] == "Name identifier" - assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[0]["creator"] is True - assert response_data[0]["affiliations"][0]["name"] == "Test" - assert response_data[0]["affiliations"][0]["identifier"] == "yes" - assert response_data[0]["affiliations"][0]["scheme"] == "uh" - assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Admin Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_creator_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["given_name"] == "Admin Given Name here" - assert 
admin_response_data[0]["family_name"] == "Family Name here" - assert admin_response_data[0]["name_type"] == "Personal" - assert admin_response_data[0]["name_identifier"] == "Name identifier" - assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert admin_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[0]["creator"] is True - assert admin_response_data[0]["affiliations"][0]["name"] == "Test" - assert admin_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Editor Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_creator_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["given_name"] == "Editor Given Name here" - assert editor_response_data[0]["family_name"] == "Family Name here" - assert editor_response_data[0]["name_type"] == "Personal" - assert editor_response_data[0]["name_identifier"] == "Name identifier" - assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert editor_response_data[0]["creator"] is True - assert editor_response_data[0]["affiliations"][0]["name"] == "Test" - assert 
editor_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Viewer Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_creator_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset creator metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = 
json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert len(response_data) == 3 - assert len(admin_response_data) == 3 - assert len(editor_response_data) == 3 - assert len(viewer_response_data) == 3 - - assert response_data[0]["id"] == pytest.global_dataset_creator_id - assert response_data[0]["given_name"] == "Given Name here" - assert response_data[0]["family_name"] == "Family Name here" - assert response_data[0]["name_type"] == "Personal" - assert response_data[0]["name_identifier"] == "Name identifier" - assert response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[0]["creator"] is True - assert response_data[0]["affiliations"][0]["name"] == "Test" - assert response_data[0]["affiliations"][0]["identifier"] == "yes" - assert response_data[0]["affiliations"][0]["scheme"] == "uh" - assert response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert response_data[1]["id"] == pytest.global_dataset_creator_id_admin - assert response_data[1]["given_name"] == "Admin Given Name here" - assert response_data[1]["family_name"] == "Family Name here" - assert response_data[1]["name_type"] == "Personal" - assert response_data[1]["name_identifier"] == "Name identifier" - assert response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[1]["creator"] is True - assert response_data[1]["affiliations"][0]["name"] == "Test" - assert response_data[1]["affiliations"][0]["identifier"] == "yes" - assert response_data[1]["affiliations"][0]["scheme"] == "uh" - assert response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert response_data[2]["id"] == pytest.global_dataset_creator_id_editor - assert response_data[2]["given_name"] == "Editor Given Name here" - assert response_data[2]["family_name"] == 
"Family Name here" - assert response_data[2]["name_type"] == "Personal" - assert response_data[2]["name_identifier"] == "Name identifier" - assert response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert response_data[2]["creator"] is True - assert response_data[2]["affiliations"][0]["name"] == "Test" - assert response_data[2]["affiliations"][0]["identifier"] == "yes" - assert response_data[2]["affiliations"][0]["scheme"] == "uh" - assert response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - assert admin_response_data[0]["given_name"] == "Given Name here" - assert admin_response_data[0]["family_name"] == "Family Name here" - assert admin_response_data[0]["name_type"] == "Personal" - assert admin_response_data[0]["name_identifier"] == "Name identifier" - assert admin_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert admin_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[0]["creator"] is True - assert admin_response_data[0]["affiliations"][0]["name"] == "Test" - assert admin_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert admin_response_data[1]["given_name"] == "Admin Given Name here" - assert admin_response_data[1]["family_name"] == "Family Name here" - assert admin_response_data[1]["name_type"] == "Personal" - assert admin_response_data[1]["name_identifier"] == "Name identifier" - assert admin_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert admin_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[1]["creator"] is True - assert admin_response_data[1]["affiliations"][0]["name"] == "Test" - assert 
admin_response_data[1]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[1]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert admin_response_data[2]["given_name"] == "Editor Given Name here" - assert admin_response_data[2]["family_name"] == "Family Name here" - assert admin_response_data[2]["name_type"] == "Personal" - assert admin_response_data[2]["name_identifier"] == "Name identifier" - assert admin_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert admin_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert admin_response_data[2]["creator"] is True - assert admin_response_data[2]["affiliations"][0]["name"] == "Test" - assert admin_response_data[2]["affiliations"][0]["identifier"] == "yes" - assert admin_response_data[2]["affiliations"][0]["scheme"] == "uh" - assert admin_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - assert editor_response_data[0]["given_name"] == "Given Name here" - assert editor_response_data[0]["family_name"] == "Family Name here" - assert editor_response_data[0]["name_type"] == "Personal" - assert editor_response_data[0]["name_identifier"] == "Name identifier" - assert editor_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert editor_response_data[0]["creator"] is True - assert editor_response_data[0]["affiliations"][0]["name"] == "Test" - assert editor_response_data[0]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert editor_response_data[1]["given_name"] == "Admin Given Name here" - assert editor_response_data[1]["family_name"] == "Family Name here" - assert editor_response_data[1]["name_type"] == "Personal" - assert 
editor_response_data[1]["name_identifier"] == "Name identifier" - assert editor_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert editor_response_data[1]["creator"] is True - assert editor_response_data[1]["affiliations"][0]["name"] == "Test" - assert editor_response_data[1]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[1]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert editor_response_data[2]["given_name"] == "Editor Given Name here" - assert editor_response_data[2]["family_name"] == "Family Name here" - assert editor_response_data[2]["name_type"] == "Personal" - assert editor_response_data[2]["name_identifier"] == "Name identifier" - assert editor_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert editor_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert editor_response_data[2]["creator"] is True - assert editor_response_data[2]["affiliations"][0]["name"] == "Test" - assert editor_response_data[2]["affiliations"][0]["identifier"] == "yes" - assert editor_response_data[2]["affiliations"][0]["scheme"] == "uh" - assert editor_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - assert viewer_response_data[0]["given_name"] == "Given Name here" - assert viewer_response_data[0]["family_name"] == "Family Name here" - assert viewer_response_data[0]["name_type"] == "Personal" - assert viewer_response_data[0]["name_identifier"] == "Name identifier" - assert viewer_response_data[0]["name_identifier_scheme"] == "Name Scheme ID" - assert viewer_response_data[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert viewer_response_data[0]["creator"] is True - assert viewer_response_data[0]["affiliations"][0]["name"] == "Test" - assert viewer_response_data[0]["affiliations"][0]["identifier"] 
== "yes" - assert viewer_response_data[0]["affiliations"][0]["scheme"] == "uh" - assert viewer_response_data[0]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[1]["given_name"] == "Admin Given Name here" - assert viewer_response_data[1]["family_name"] == "Family Name here" - assert viewer_response_data[1]["name_type"] == "Personal" - assert viewer_response_data[1]["name_identifier"] == "Name identifier" - assert viewer_response_data[1]["name_identifier_scheme"] == "Name Scheme ID" - assert viewer_response_data[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert viewer_response_data[1]["creator"] is True - assert viewer_response_data[1]["affiliations"][0]["name"] == "Test" - assert viewer_response_data[1]["affiliations"][0]["identifier"] == "yes" - assert viewer_response_data[1]["affiliations"][0]["scheme"] == "uh" - assert viewer_response_data[1]["affiliations"][0]["scheme_uri"] == "scheme uri" - assert viewer_response_data[2]["given_name"] == "Editor Given Name here" - assert viewer_response_data[2]["family_name"] == "Family Name here" - assert viewer_response_data[2]["name_type"] == "Personal" - assert viewer_response_data[2]["name_identifier"] == "Name identifier" - assert viewer_response_data[2]["name_identifier_scheme"] == "Name Scheme ID" - assert viewer_response_data[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" - assert viewer_response_data[2]["creator"] is True - assert viewer_response_data[2]["affiliations"][0]["name"] == "Test" - assert viewer_response_data[2]["affiliations"][0]["identifier"] == "yes" - assert viewer_response_data[2]["affiliations"][0]["scheme"] == "uh" - assert viewer_response_data[2]["affiliations"][0]["scheme_uri"] == "scheme uri" - - -def test_delete_dataset_creator_metadata(clients): - """ - Given a Flask application configured for testing and a study ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' - endpoint is requested (DELETE) - Then check that the 
response is valid and deletes the dataset creator metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - creator_id = pytest.global_dataset_creator_id - admin_creator_id = pytest.global_dataset_creator_id_admin - editor_creator_id = pytest.global_dataset_creator_id_editor - - # Verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{admin_creator_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{editor_creator_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- DATE METADATA ------------------- # -def test_post_dataset_date_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset date metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210101, "type": "Type", "information": "Info"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - 
pytest.global_dataset_date_id = response_data[0]["id"] - - assert response_data[0]["date"] == 20210101 - assert response_data[0]["type"] == "Type" - assert response_data[0]["information"] == "Info" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210102, "type": "Type", "information": "Info"}], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_date_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["date"] == 20210102 - assert admin_response_data[0]["type"] == "Type" - assert admin_response_data[0]["information"] == "Info" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210103, "type": "Type", "information": "Info"}], - ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_date_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["date"] == 20210103 - assert editor_response_data[0]["type"] == "Type" - assert editor_response_data[0]["information"] == "Info" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210101, "type": "Type", "information": "Info"}], - ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_date_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset date metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = 
pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date" - ) - - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert len(response_data) == 3 - assert len(admin_response_data) == 3 - assert len(editor_response_data) == 3 - assert len(viewer_response_data) == 3 - - assert response_data[0]["date"] == 20210101 - assert response_data[0]["type"] == "Type" - assert response_data[0]["information"] == "Info" - assert response_data[1]["date"] == 20210102 - assert response_data[1]["type"] == "Type" - assert response_data[1]["information"] == "Info" - assert response_data[2]["date"] == 20210103 - assert response_data[2]["type"] == "Type" - - assert admin_response_data[0]["date"] == 20210101 - assert admin_response_data[0]["type"] == "Type" - assert admin_response_data[0]["information"] == "Info" - assert admin_response_data[1]["date"] == 20210102 - assert admin_response_data[1]["type"] == "Type" - assert admin_response_data[1]["information"] == "Info" - assert admin_response_data[2]["date"] == 20210103 - assert admin_response_data[2]["type"] == "Type" - - assert editor_response_data[0]["date"] == 20210101 - assert editor_response_data[0]["type"] == "Type" - assert editor_response_data[0]["information"] == "Info" - assert editor_response_data[1]["date"] == 20210102 - assert 
editor_response_data[1]["type"] == "Type" - assert editor_response_data[1]["information"] == "Info" - assert editor_response_data[2]["date"] == 20210103 - assert editor_response_data[2]["type"] == "Type" - - assert viewer_response_data[0]["date"] == 20210101 - assert viewer_response_data[0]["type"] == "Type" - assert viewer_response_data[0]["information"] == "Info" - assert viewer_response_data[1]["date"] == 20210102 - assert viewer_response_data[1]["type"] == "Type" - assert viewer_response_data[1]["information"] == "Info" - assert viewer_response_data[2]["date"] == 20210103 - assert viewer_response_data[2]["type"] == "Type" - - -def test_delete_dataset_date_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset date metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - date_id = pytest.global_dataset_date_id - admin_date_id = pytest.global_dataset_date_id_admin - editor_date_id = pytest.global_dataset_date_id_editor - - # Verify Viewer cannot delete - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{admin_date_id}" - ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{editor_date_id}" - ) - - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 - - -# ------------------- 
DE-IDENTIFICATION LEVEL METADATA ------------------- # -def test_put_dataset_deidentification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - de-identification metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, + main_title = next( + (index for (index, d) in enumerate(owner_titles) if d["type"] == "MainTitle"), + None, ) - - assert response.status_code == 200 - response_data = json.loads(response.data) - - assert response_data["type"] == "Level" - assert response_data["direct"] is True - assert response_data["hipaa"] is True - assert response_data["dates"] is True - assert response_data["nonarr"] is True - assert response_data["k_anon"] is True - assert response_data["details"] == "Details" - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, + a_main_title = next( + ( + index + for (index, d) in enumerate(admin_response_data["titles"]) + if d["type"] == "MainTitle" + ), + None, ) - - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["type"] == "Level" - assert admin_response_data["direct"] is True - assert admin_response_data["hipaa"] is True - assert 
admin_response_data["dates"] is True - assert admin_response_data["nonarr"] is True - assert admin_response_data["k_anon"] is True - assert admin_response_data["details"] == "Details" - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, + e_main_title = next( + (index for (index, d) in enumerate(editor_titles) if d["type"] == "MainTitle"), + None, ) - - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["type"] == "Level" - assert editor_response_data["direct"] is True - assert editor_response_data["hipaa"] is True - assert editor_response_data["dates"] is True - assert editor_response_data["nonarr"] is True - assert editor_response_data["k_anon"] is True - assert editor_response_data["details"] == "Details" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level", - json={ - "type": "Level", - "direct": True, - "hipaa": True, - "dates": True, - "nonarr": True, - "k_anon": True, - "details": "Details", - }, + v_main_title = next( + (index for (index, d) in enumerate(viewer_titles) if d["type"] == "MainTitle"), + None, ) - - assert viewer_response.status_code == 403 - - -def test_get_dataset_deidentification_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/de-identification' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - de-identification metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = 
_logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + # search for admin title index + admin_title = next( + ( + index + for (index, d) in enumerate(owner_titles) + if d["title"] == "Admin Title" + ), + None, ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + a_admin_title = next( + ( + index + for (index, d) in enumerate(admin_titles) + if d["title"] == "Admin Title" + ), + None, ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + e_admin_title = next( + ( + index + for (index, d) in enumerate(editor_titles) + if d["title"] == "Admin Title" + ), + None, ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/de-identification-level" + v_admin_title = next( + ( + index + for (index, d) in enumerate(viewer_titles) + if d["title"] == "Admin Title" + ), + None, ) - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data["type"] == "Level" - assert response_data["direct"] is True - assert response_data["hipaa"] is True - assert response_data["dates"] is True - assert response_data["nonarr"] is True - assert response_data["k_anon"] is True - assert response_data["details"] == "Details" - - assert admin_response_data["type"] == "Level" - assert admin_response_data["direct"] is True - assert admin_response_data["hipaa"] is True - assert admin_response_data["dates"] is True - assert admin_response_data["nonarr"] is True - assert admin_response_data["k_anon"] is True - assert admin_response_data["details"] == 
"Details" - - assert editor_response_data["type"] == "Level" - assert editor_response_data["direct"] is True - assert editor_response_data["hipaa"] is True - assert editor_response_data["dates"] is True - assert editor_response_data["nonarr"] is True - assert editor_response_data["k_anon"] is True - assert editor_response_data["details"] == "Details" - - assert viewer_response_data["type"] == "Level" - assert viewer_response_data["direct"] is True - assert viewer_response_data["hipaa"] is True - assert viewer_response_data["dates"] is True - assert viewer_response_data["nonarr"] is True - assert viewer_response_data["k_anon"] is True - assert viewer_response_data["details"] == "Details" - - -# ------------------- DESCRIPTION METADATA ------------------- # -def test_post_dataset_descriptions_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - description metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Owner Description", "type": "Methods"}], + # search for editor title index + editor_title = next( + ( + index + for (index, d) in enumerate(owner_titles) + if d["title"] == "Editor Title" + ), + None, ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 - response_data = json.loads(response.data) - pytest.global_dataset_description_id = response_data[0]["id"] - - assert response_data[0]["description"] == "Owner Description" - assert response_data[0]["type"] == "Methods" - - admin_response = _admin_client.post( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Admin Description", "type": "Methods"}], + a_editor_title = next( + ( + index + for (index, d) in enumerate(admin_titles) + if d["title"] == "Editor Title" + ), + None, ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_description_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["description"] == "Admin Description" - assert admin_response_data[0]["type"] == "Methods" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Editor Description", "type": "Methods"}], + e_editor_title = next( + ( + index + for (index, d) in enumerate(editor_titles) + if d["title"] == "Editor Title" + ), + None, ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_description_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["description"] == "Editor Description" - assert editor_response_data[0]["type"] == "Methods" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description", - json=[{"description": "Viewer Description", "type": "Methods"}], + v_editor_title = next( + ( + index + for (index, d) in enumerate(viewer_titles) + if d["title"] == "Editor Title" + ), + None, ) - assert viewer_response.status_code == 403 - - -def test_get_dataset_descriptions_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/description' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content - """ - _logged_in_client, _admin_client, 
_editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + # search for owner title index + own_title = next( + ( + index + for (index, d) in enumerate(owner_titles) + if d["title"] == "Owner Title" + ), + None, ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + a_own_title = next( + ( + index + for (index, d) in enumerate(admin_titles) + if d["title"] == "Owner Title" + ), + None, ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + e_own_title = next( + ( + index + for (index, d) in enumerate(editor_titles) + if d["title"] == "Owner Title" + ), + None, ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/description" + v_own_title = next( + ( + index + for (index, d) in enumerate(viewer_titles) + if d["title"] == "Owner Title" + ), + None, ) - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Dataset description is included in the responses - assert len(response_data) == 4 - assert len(admin_response_data) == 4 - assert len(editor_response_data) == 4 - assert len(viewer_response_data) == 4 + assert owner_titles[main_title]["title"] == "Dataset Title" + assert owner_titles[main_title]["type"] == "MainTitle" + assert owner_titles[own_title]["title"] == "Owner Title" + assert owner_titles[own_title]["type"] == "Subtitle" + assert owner_titles[admin_title]["title"] == "Admin Title" + assert 
owner_titles[admin_title]["type"] == "Subtitle" + assert owner_titles[editor_title]["title"] == "Editor Title" + assert owner_titles[editor_title]["type"] == "Subtitle" + + assert admin_titles[a_main_title]["title"] == "Dataset Title" + assert admin_titles[a_main_title]["type"] == "MainTitle" + assert admin_titles[a_own_title]["title"] == "Owner Title" + assert admin_titles[a_own_title]["type"] == "Subtitle" + assert admin_titles[a_admin_title]["title"] == "Admin Title" + assert admin_titles[a_admin_title]["type"] == "Subtitle" + assert admin_titles[a_editor_title]["title"] == "Editor Title" + assert admin_titles[a_editor_title]["type"] == "Subtitle" + + assert editor_titles[e_main_title]["title"] == "Dataset Title" + assert editor_titles[e_main_title]["type"] == "MainTitle" + assert editor_titles[e_own_title]["title"] == "Owner Title" + assert editor_titles[e_own_title]["type"] == "Subtitle" + assert editor_titles[e_admin_title]["title"] == "Admin Title" + assert editor_titles[e_admin_title]["type"] == "Subtitle" + assert editor_titles[e_editor_title]["title"] == "Editor Title" + assert editor_titles[e_editor_title]["type"] == "Subtitle" + + assert viewer_titles[v_main_title]["title"] == "Dataset Title" + assert viewer_titles[v_main_title]["type"] == "MainTitle" + assert viewer_titles[v_own_title]["title"] == "Owner Title" + assert viewer_titles[v_own_title]["type"] == "Subtitle" + assert viewer_titles[v_admin_title]["title"] == "Admin Title" + assert viewer_titles[v_admin_title]["type"] == "Subtitle" + assert viewer_titles[v_editor_title]["title"] == "Editor Title" + assert viewer_titles[v_editor_title]["type"] == "Subtitle" + + assert len(owner_descriptions) == 4 + assert len(admin_descriptions) == 4 + assert len(editor_descriptions) == 4 + assert len(viewer_descriptions) == 4 # seacrch for type abstract index main_descrip = next( - (index for (index, d) in enumerate(response_data) if d["type"] == "Abstract"), + ( + index + for (index, d) in 
enumerate(owner_descriptions) + if d["type"] == "Abstract" + ), None, ) a_main_descrip = next( ( index - for (index, d) in enumerate(admin_response_data) + for (index, d) in enumerate(admin_descriptions) if d["type"] == "Abstract" ), None, @@ -1503,7 +677,7 @@ def test_get_dataset_descriptions_metadata(clients): e_main_descrip = next( ( index - for (index, d) in enumerate(editor_response_data) + for (index, d) in enumerate(editor_descriptions) if d["type"] == "Abstract" ), None, @@ -1511,18 +685,17 @@ def test_get_dataset_descriptions_metadata(clients): v_main_descrip = next( ( index - for (index, d) in enumerate(viewer_response_data) + for (index, d) in enumerate(viewer_descriptions) if d["type"] == "Abstract" ), None, ) - # search for owner description # pylint: disable=line-too-long own_descrip = next( ( index - for (index, d) in enumerate(response_data) + for (index, d) in enumerate(owner_descriptions) if d["description"] == "Owner Description" ), None, @@ -1530,7 +703,7 @@ def test_get_dataset_descriptions_metadata(clients): a_own_descrip = next( ( index - for (index, d) in enumerate(admin_response_data) + for (index, d) in enumerate(admin_descriptions) if d["description"] == "Owner Description" ), None, @@ -1538,7 +711,7 @@ def test_get_dataset_descriptions_metadata(clients): e_own_descrip = next( ( index - for (index, d) in enumerate(editor_response_data) + for (index, d) in enumerate(editor_descriptions) if d["description"] == "Owner Description" ), None, @@ -1546,7 +719,7 @@ def test_get_dataset_descriptions_metadata(clients): v_own_descrip = next( ( index - for (index, d) in enumerate(viewer_response_data) + for (index, d) in enumerate(viewer_descriptions) if d["description"] == "Owner Description" ), None, @@ -1556,7 +729,7 @@ def test_get_dataset_descriptions_metadata(clients): admin_descrip = next( ( index - for (index, d) in enumerate(response_data) + for (index, d) in enumerate(owner_descriptions) if d["description"] == "Admin Description" ), None, 
@@ -1564,7 +737,7 @@ def test_get_dataset_descriptions_metadata(clients): a_admin_descrip = next( ( index - for (index, d) in enumerate(admin_response_data) + for (index, d) in enumerate(admin_descriptions) if d["description"] == "Admin Description" ), None, @@ -1572,7 +745,7 @@ def test_get_dataset_descriptions_metadata(clients): e_admin_descrip = next( ( index - for (index, d) in enumerate(editor_response_data) + for (index, d) in enumerate(editor_descriptions) if d["description"] == "Admin Description" ), None, @@ -1580,7 +753,7 @@ def test_get_dataset_descriptions_metadata(clients): v_admin_descrip = next( ( index - for (index, d) in enumerate(viewer_response_data) + for (index, d) in enumerate(viewer_descriptions) if d["description"] == "Admin Description" ), None, @@ -1590,7 +763,7 @@ def test_get_dataset_descriptions_metadata(clients): edit_descrip = next( ( index - for (index, d) in enumerate(response_data) + for (index, d) in enumerate(owner_descriptions) if d["description"] == "Editor Description" ), None, @@ -1598,7 +771,7 @@ def test_get_dataset_descriptions_metadata(clients): a_edit_descrip = next( ( index - for (index, d) in enumerate(admin_response_data) + for (index, d) in enumerate(admin_descriptions) if d["description"] == "Editor Description" ), None, @@ -1606,7 +779,7 @@ def test_get_dataset_descriptions_metadata(clients): e_edit_descrip = next( ( index - for (index, d) in enumerate(editor_response_data) + for (index, d) in enumerate(editor_descriptions) if d["description"] == "Editor Description" ), None, @@ -1614,49 +787,126 @@ def test_get_dataset_descriptions_metadata(clients): v_edit_descrip = next( ( index - for (index, d) in enumerate(viewer_response_data) + for (index, d) in enumerate(viewer_descriptions) if d["description"] == "Editor Description" ), None, ) - assert response_data[main_descrip]["description"] == "Dataset Description" - assert response_data[main_descrip]["type"] == "Abstract" - assert 
response_data[own_descrip]["description"] == "Owner Description" - assert response_data[own_descrip]["type"] == "Methods" - assert response_data[admin_descrip]["description"] == "Admin Description" - assert response_data[admin_descrip]["type"] == "Methods" - assert response_data[edit_descrip]["description"] == "Editor Description" - assert response_data[edit_descrip]["type"] == "Methods" - - assert admin_response_data[a_main_descrip]["description"] == "Dataset Description" - assert admin_response_data[a_main_descrip]["type"] == "Abstract" - assert admin_response_data[a_own_descrip]["description"] == "Owner Description" - assert admin_response_data[a_own_descrip]["type"] == "Methods" - assert admin_response_data[a_admin_descrip]["description"] == "Admin Description" - assert admin_response_data[a_admin_descrip]["type"] == "Methods" - assert admin_response_data[a_edit_descrip]["description"] == "Editor Description" - assert admin_response_data[a_edit_descrip]["type"] == "Methods" - - assert editor_response_data[e_main_descrip]["description"] == "Dataset Description" - assert editor_response_data[e_main_descrip]["type"] == "Abstract" - assert editor_response_data[e_own_descrip]["description"] == "Owner Description" - assert editor_response_data[e_own_descrip]["type"] == "Methods" - assert editor_response_data[e_admin_descrip]["description"] == "Admin Description" - assert editor_response_data[e_admin_descrip]["type"] == "Methods" - assert editor_response_data[e_edit_descrip]["description"] == "Editor Description" - assert editor_response_data[e_edit_descrip]["type"] == "Methods" - - assert viewer_response_data[v_main_descrip]["description"] == "Dataset Description" - assert viewer_response_data[v_main_descrip]["type"] == "Abstract" - assert viewer_response_data[v_own_descrip]["description"] == "Owner Description" - assert viewer_response_data[v_own_descrip]["type"] == "Methods" - assert viewer_response_data[v_admin_descrip]["description"] == "Admin Description" - 
assert viewer_response_data[v_admin_descrip]["type"] == "Methods" - assert viewer_response_data[v_edit_descrip]["description"] == "Editor Description" - assert viewer_response_data[v_edit_descrip]["type"] == "Methods" + assert owner_descriptions[main_descrip]["description"] == "Dataset Description" + assert owner_descriptions[main_descrip]["type"] == "Abstract" + assert owner_descriptions[own_descrip]["description"] == "Owner Description" + assert owner_descriptions[own_descrip]["type"] == "Methods" + assert owner_descriptions[admin_descrip]["description"] == "Admin Description" + assert owner_descriptions[admin_descrip]["type"] == "Methods" + assert owner_descriptions[edit_descrip]["description"] == "Editor Description" + assert owner_descriptions[edit_descrip]["type"] == "Methods" + + assert admin_descriptions[a_main_descrip]["description"] == "Dataset Description" + assert admin_descriptions[a_main_descrip]["type"] == "Abstract" + assert admin_descriptions[a_own_descrip]["description"] == "Owner Description" + assert admin_descriptions[a_own_descrip]["type"] == "Methods" + assert admin_descriptions[a_admin_descrip]["description"] == "Admin Description" + assert admin_descriptions[a_admin_descrip]["type"] == "Methods" + assert admin_descriptions[a_edit_descrip]["description"] == "Editor Description" + assert admin_descriptions[a_edit_descrip]["type"] == "Methods" + + assert editor_descriptions[e_main_descrip]["description"] == "Dataset Description" + assert editor_descriptions[e_main_descrip]["type"] == "Abstract" + assert editor_descriptions[e_own_descrip]["description"] == "Owner Description" + assert editor_descriptions[e_own_descrip]["type"] == "Methods" + assert editor_descriptions[e_admin_descrip]["description"] == "Admin Description" + assert editor_descriptions[e_admin_descrip]["type"] == "Methods" + assert editor_descriptions[e_edit_descrip]["description"] == "Editor Description" + assert editor_descriptions[e_edit_descrip]["type"] == "Methods" + + 
assert viewer_descriptions[v_main_descrip]["description"] == "Dataset Description" + assert viewer_descriptions[v_main_descrip]["type"] == "Abstract" + assert viewer_descriptions[v_own_descrip]["description"] == "Owner Description" + assert viewer_descriptions[v_own_descrip]["type"] == "Methods" + assert viewer_descriptions[v_admin_descrip]["description"] == "Admin Description" + assert viewer_descriptions[v_admin_descrip]["type"] == "Methods" + assert viewer_descriptions[v_edit_descrip]["description"] == "Editor Description" + assert viewer_descriptions[v_edit_descrip]["type"] == "Methods" + + assert len(owner_dates) == 3 + assert len(admin_dates) == 3 + assert len(editor_dates) == 3 + assert len(viewer_dates) == 3 + + assert owner_dates[0]["date"] == 20210101 + assert owner_dates[0]["type"] == "Accepted" + assert owner_dates[0]["information"] == "Info" + assert owner_dates[1]["date"] == 20210102 + assert owner_dates[1]["type"] == "Accepted" + assert owner_dates[1]["information"] == "Info" + assert owner_dates[2]["date"] == 20210103 + assert owner_dates[2]["type"] == "Accepted" + + assert admin_dates[0]["date"] == 20210101 + assert admin_dates[0]["type"] == "Accepted" + assert admin_dates[0]["information"] == "Info" + assert admin_dates[1]["date"] == 20210102 + assert admin_dates[1]["type"] == "Accepted" + assert admin_dates[1]["information"] == "Info" + assert admin_dates[2]["date"] == 20210103 + assert admin_dates[2]["type"] == "Accepted" + + assert editor_dates[0]["date"] == 20210101 + assert editor_dates[0]["type"] == "Accepted" + assert editor_dates[0]["information"] == "Info" + assert editor_dates[1]["date"] == 20210102 + assert editor_dates[1]["type"] == "Accepted" + assert editor_dates[1]["information"] == "Info" + assert editor_dates[2]["date"] == 20210103 + assert editor_dates[2]["type"] == "Accepted" + + assert viewer_dates[0]["date"] == 20210101 + assert viewer_dates[0]["type"] == "Accepted" + assert viewer_dates[0]["information"] == "Info" + assert 
viewer_dates[1]["date"] == 20210102 + assert viewer_dates[1]["type"] == "Accepted" + assert viewer_dates[1]["information"] == "Info" + assert viewer_dates[2]["date"] == 20210103 + assert viewer_dates[2]["type"] == "Accepted" + + +# ------------------- DELETE TITLE METADATA ------------------- # +def test_delete_dataset_title_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + title metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + title_id = pytest.global_dataset_title_id + admin_title_id = pytest.global_dataset_title_id_admin + editor_title_id = pytest.global_dataset_title_id_editor + + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{admin_title_id}" + ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{editor_title_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 +# ------------------- DELETE DESCRIPTION METADATA ------------------- # def test_delete_dataset_description_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID @@ -1692,751 +942,1890 @@ def test_delete_dataset_description_metadata(clients): assert editor_response.status_code == 204 -# ------------------- DATASET HEALTHSHEET 
MOTIVATION METADATA ------------------- # -def test_put_healthsheet_motivation_dataset_metadata(clients): +# ------------------- DELETE DATE METADATA ------------------- # +def test_delete_dataset_date_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/date' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset date metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + date_id = pytest.global_dataset_date_id + admin_date_id = pytest.global_dataset_date_id_admin + editor_date_id = pytest.global_dataset_date_id_editor - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + ) + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{date_id}" + ) + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{admin_date_id}" ) + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/date/{editor_date_id}" + ) + + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 + + +# ------------------- DATASET TEAM METADATA ------------------- # +def test_post_dataset_team_metadata(clients): + """ + Given a Flask application configured for 
testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/team' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + team metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, + ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["motivation"] == 
'[{"id":1,"question":"For","response":"new"}]' - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + creators = response_data["creators"] + contributors = response_data["contributors"] + funders = response_data["funders"] + managing_organization = response_data["managing_organization"] + + pytest.global_dataset_funder_id = funders[0]["id"] + pytest.global_dataset_creator_id = creators[0]["id"] + pytest.global_dataset_contributor_id = contributors[0]["id"] + + assert creators[0]["given_name"] == "Given Name here" + assert creators[0]["family_name"] == "Family Name here" + assert creators[0]["name_type"] == "Personal" + assert creators[0]["name_identifier"] == "Name identifier" + assert creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[0]["creator"] is True + assert creators[0]["affiliations"][0]["name"] == "Test" + assert creators[0]["affiliations"][0]["identifier"] == "yes" + assert creators[0]["affiliations"][0]["scheme"] == "uh" + assert creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert funders[0]["name"] == "Name" + assert funders[0]["award_number"] == "award number" + assert funders[0]["award_title"] == "Award Title" + assert funders[0]["award_uri"] == "Award URI" + assert funders[0]["identifier"] == "Identifier" + assert funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[0]["identifier_type"] == "Identifier Type" + + assert contributors[0]["given_name"] == "Given Name here" + assert contributors[0]["family_name"] == "Family Name here" + assert contributors[0]["name_type"] == "Personal" + assert contributors[0]["name_identifier"] == "Name identifier" + assert contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[0]["name_identifier_scheme_uri"] == "Name ID 
Scheme URI" + assert contributors[0]["creator"] is False + assert contributors[0]["contributor_type"] == "Con Type" + assert contributors[0]["affiliations"][0]["name"] == "Test" + assert contributors[0]["affiliations"][0]["identifier"] == "yes" + assert contributors[0]["affiliations"][0]["scheme"] == "uh" + assert contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert managing_organization["name"] == "Managing Organization Name" + assert managing_organization["identifier"] == "identifier" + assert managing_organization["identifier_scheme"] == "identifier scheme" + assert managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Admin Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Admin Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "admin Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Admin Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + 
"identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, ) + # Add a one second delay to prevent duplicate timestamps + sleep(1) + assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) + + admin_creators = admin_response_data["creators"] + admin_managing_organization = admin_response_data["managing_organization"] + admin_funders = admin_response_data["funders"] + admin_contributors = admin_response_data["contributors"] + + pytest.global_dataset_funder_id_admin = admin_funders[0]["id"] + pytest.global_dataset_creator_id_admin = admin_creators[0]["id"] + pytest.global_dataset_contributor_id_admin = admin_contributors[0]["id"] + + assert admin_creators[0]["given_name"] == "Admin Given Name here" + assert admin_creators[0]["family_name"] == "Family Name here" + assert admin_creators[0]["name_type"] == "Personal" + assert admin_creators[0]["name_identifier"] == "Name identifier" + assert admin_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_creators[0]["creator"] is True + assert admin_creators[0]["affiliations"][0]["name"] == "Test" + assert admin_creators[0]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[0]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_funders[0]["name"] == "Admin Name" + assert admin_funders[0]["award_number"] == "award number" + assert admin_funders[0]["award_title"] == "Award Title" + assert admin_funders[0]["award_uri"] == "Award URI" + assert admin_funders[0]["identifier"] == "Identifier" + assert admin_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[0]["identifier_type"] == "Identifier Type" + + assert admin_contributors[0]["given_name"] == "Admin Given Name here" + assert admin_contributors[0]["family_name"] == 
"Family Name here" + assert admin_contributors[0]["name_type"] == "Personal" + assert admin_contributors[0]["name_identifier"] == "Name identifier" + assert admin_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[0]["creator"] is False + assert admin_contributors[0]["contributor_type"] == "Con Type" + assert admin_contributors[0]["affiliations"][0]["name"] == "Test" + assert admin_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert admin_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_managing_organization["name"] == "admin Managing Organization Name" + assert admin_managing_organization["identifier"] == "identifier" + assert admin_managing_organization["identifier_scheme"] == "identifier scheme" assert ( - admin_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' + admin_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" ) - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Editor Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Editor Given Name here", + "family_name": "Editor Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + 
"name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "editor Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Editor Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, ) + assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) + + editor_creators = editor_response_data["creators"] + editor_managing_organization = editor_response_data["managing_organization"] + editor_funders = editor_response_data["funders"] + editor_contributors = editor_response_data["contributors"] + + pytest.global_dataset_funder_id_editor = editor_funders[0]["id"] + pytest.global_dataset_creator_id_editor = editor_creators[0]["id"] + pytest.global_dataset_contributor_id_editor = editor_contributors[0]["id"] + + assert editor_creators[0]["given_name"] == "Editor Given Name here" + assert editor_creators[0]["family_name"] == "Family Name here" + assert editor_creators[0]["name_type"] == "Personal" + assert editor_creators[0]["name_identifier"] == "Name identifier" + assert editor_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[0]["creator"] is True + assert editor_creators[0]["affiliations"][0]["name"] == "Test" + assert editor_creators[0]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[0]["affiliations"][0]["scheme"] == "uh" + assert 
editor_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_contributors[0]["given_name"] == "Editor Given Name here" + assert editor_contributors[0]["family_name"] == "Editor Family Name here" + assert editor_contributors[0]["name_type"] == "Personal" + assert editor_contributors[0]["name_identifier"] == "Name identifier" + assert editor_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[0]["creator"] is False + assert editor_contributors[0]["contributor_type"] == "Con Type" + assert editor_contributors[0]["affiliations"][0]["name"] == "Test" + assert editor_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_funders[0]["name"] == "Editor Name" + assert editor_funders[0]["award_number"] == "award number" + assert editor_funders[0]["award_title"] == "Award Title" + assert editor_funders[0]["award_uri"] == "Award URI" + assert editor_funders[0]["identifier"] == "Identifier" assert ( - editor_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' + editor_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + ) # pylint: disable=line-too-long + assert editor_funders[0]["identifier_type"] == "Identifier Type" + + assert editor_managing_organization["name"] == "editor Managing Organization Name" + assert editor_managing_organization["identifier"] == "identifier" + assert editor_managing_organization["identifier_scheme"] == "identifier scheme" + assert ( + editor_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" ) - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", - json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, + 
viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Viewer Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Viewer Given Name here", + "family_name": "Viewer Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "editor Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Viewer Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, ) + assert viewer_response.status_code == 403 -def test_get_dataset_healthsheet_motivation_metadata(clients): +def test_get_dataset_team_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation' + When the '/study/{study_id}/dataset/{dataset_id}/metadata/team' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - description metadata content + team metadata content """ _logged_in_client, _admin_client, 
_editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" + ) + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" + ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team" ) + + assert response.status_code == 200 assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) + + creators = response_data["creators"] + contributors = response_data["contributors"] + funders = response_data["funders"] + managing_organization = response_data["managing_organization"] + + admin_creators = admin_response_data["creators"] + admin_managing_organization = admin_response_data["managing_organization"] + admin_funders = admin_response_data["funders"] + admin_contributors = admin_response_data["contributors"] + + editor_creators = editor_response_data["creators"] + editor_managing_organization = editor_response_data["managing_organization"] + editor_funders = editor_response_data["funders"] + editor_contributors = editor_response_data["contributors"] + + viewer_creators = viewer_response_data["creators"] + viewer_managing_organization = 
viewer_response_data["managing_organization"] + viewer_funders = viewer_response_data["funders"] + + assert len(funders) == 3 + assert len(admin_funders) == 3 + assert len(editor_funders) == 3 + assert len(viewer_funders) == 3 + + assert funders[0]["name"] == "Name" + assert funders[0]["award_number"] == "award number" + assert funders[0]["award_title"] == "Award Title" + assert funders[0]["award_uri"] == "Award URI" + assert funders[0]["identifier"] == "Identifier" + assert funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[0]["identifier_type"] == "Identifier Type" + assert funders[1]["name"] == "Admin Name" + assert funders[1]["award_number"] == "award number" + assert funders[1]["award_title"] == "Award Title" + assert funders[1]["award_uri"] == "Award URI" + assert funders[1]["identifier"] == "Identifier" + assert funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[1]["identifier_type"] == "Identifier Type" + assert funders[2]["name"] == "Editor Name" + assert funders[2]["award_number"] == "award number" + assert funders[2]["award_title"] == "Award Title" + assert funders[2]["award_uri"] == "Award URI" + assert funders[2]["identifier"] == "Identifier" + assert funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert funders[2]["identifier_type"] == "Identifier Type" + + assert admin_funders[0]["name"] == "Name" + assert admin_funders[0]["award_number"] == "award number" + assert admin_funders[0]["award_title"] == "Award Title" + assert admin_funders[0]["award_uri"] == "Award URI" + assert admin_funders[0]["identifier"] == "Identifier" + assert admin_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[0]["identifier_type"] == "Identifier Type" + assert admin_funders[1]["name"] == "Admin Name" + assert admin_funders[1]["award_number"] == "award number" + assert admin_funders[1]["award_title"] == "Award Title" + assert admin_funders[1]["award_uri"] == "Award 
URI" + assert admin_funders[1]["identifier"] == "Identifier" + assert admin_funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[1]["identifier_type"] == "Identifier Type" + assert admin_funders[2]["name"] == "Editor Name" + assert admin_funders[2]["award_number"] == "award number" + assert admin_funders[2]["award_title"] == "Award Title" + assert admin_funders[2]["award_uri"] == "Award URI" + assert admin_funders[2]["identifier"] == "Identifier" + assert admin_funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert admin_funders[2]["identifier_type"] == "Identifier Type" + + assert editor_funders[0]["name"] == "Name" + assert editor_funders[0]["award_number"] == "award number" + assert editor_funders[0]["award_title"] == "Award Title" + assert editor_funders[0]["award_uri"] == "Award URI" + assert editor_funders[0]["identifier"] == "Identifier" + assert editor_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_funders[0]["identifier_type"] == "Identifier Type" + assert editor_funders[1]["name"] == "Admin Name" + assert editor_funders[1]["award_number"] == "award number" + assert editor_funders[1]["award_title"] == "Award Title" + assert editor_funders[1]["award_uri"] == "Award URI" + assert editor_funders[1]["identifier"] == "Identifier" + assert editor_funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_funders[1]["identifier_type"] == "Identifier Type" + assert editor_funders[2]["name"] == "Editor Name" + assert editor_funders[2]["award_number"] == "award number" + assert editor_funders[2]["award_title"] == "Award Title" + assert editor_funders[2]["award_uri"] == "Award URI" + assert editor_funders[2]["identifier"] == "Identifier" + assert editor_funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert editor_funders[2]["identifier_type"] == "Identifier Type" + + assert viewer_funders[0]["name"] == "Name" + assert 
viewer_funders[0]["award_number"] == "award number" + assert viewer_funders[0]["award_title"] == "Award Title" + assert viewer_funders[0]["award_uri"] == "Award URI" + assert viewer_funders[0]["identifier"] == "Identifier" + assert viewer_funders[0]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_funders[0]["identifier_type"] == "Identifier Type" + assert viewer_funders[1]["name"] == "Admin Name" + assert viewer_funders[1]["award_number"] == "award number" + assert viewer_funders[1]["award_title"] == "Award Title" + assert viewer_funders[1]["award_uri"] == "Award URI" + assert viewer_funders[1]["identifier"] == "Identifier" + assert viewer_funders[1]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_funders[1]["identifier_type"] == "Identifier Type" + assert viewer_funders[2]["name"] == "Editor Name" + assert viewer_funders[2]["award_number"] == "award number" + assert viewer_funders[2]["award_title"] == "Award Title" + assert viewer_funders[2]["award_uri"] == "Award URI" + assert viewer_funders[2]["identifier"] == "Identifier" + assert viewer_funders[2]["identifier_scheme_uri"] == "Identifier Scheme URI" + assert viewer_funders[2]["identifier_type"] == "Identifier Type" + + assert len(creators) == 3 + assert len(admin_creators) == 3 + assert len(editor_creators) == 3 + assert len(viewer_creators) == 3 + + assert creators[0]["id"] == pytest.global_dataset_creator_id + assert creators[0]["given_name"] == "Given Name here" + assert creators[0]["family_name"] == "Family Name here" + assert creators[0]["name_type"] == "Personal" + assert creators[0]["name_identifier"] == "Name identifier" + assert creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[0]["creator"] is True + assert creators[0]["affiliations"][0]["name"] == "Test" + assert creators[0]["affiliations"][0]["identifier"] == "yes" + assert 
creators[0]["affiliations"][0]["scheme"] == "uh" + assert creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert creators[1]["id"] == pytest.global_dataset_creator_id_admin + assert creators[1]["given_name"] == "Admin Given Name here" + assert creators[1]["family_name"] == "Family Name here" + assert creators[1]["name_type"] == "Personal" + assert creators[1]["name_identifier"] == "Name identifier" + assert creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[1]["creator"] is True + assert creators[1]["affiliations"][0]["name"] == "Test" + assert creators[1]["affiliations"][0]["identifier"] == "yes" + assert creators[1]["affiliations"][0]["scheme"] == "uh" + assert creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert creators[2]["id"] == pytest.global_dataset_creator_id_editor + assert creators[2]["given_name"] == "Editor Given Name here" + assert creators[2]["family_name"] == "Family Name here" + assert creators[2]["name_type"] == "Personal" + assert creators[2]["name_identifier"] == "Name identifier" + assert creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert creators[2]["creator"] is True + assert creators[2]["affiliations"][0]["name"] == "Test" + assert creators[2]["affiliations"][0]["identifier"] == "yes" + assert creators[2]["affiliations"][0]["scheme"] == "uh" + assert creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_creators[0]["given_name"] == "Given Name here" + assert admin_creators[0]["family_name"] == "Family Name here" + assert admin_creators[0]["name_type"] == "Personal" + assert admin_creators[0]["name_identifier"] == "Name identifier" + assert admin_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert 
admin_creators[0]["creator"] is True + assert admin_creators[0]["affiliations"][0]["name"] == "Test" + assert admin_creators[0]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[0]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_creators[1]["given_name"] == "Admin Given Name here" + assert admin_creators[1]["family_name"] == "Family Name here" + assert admin_creators[1]["name_type"] == "Personal" + assert admin_creators[1]["name_identifier"] == "Name identifier" + assert admin_creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_creators[1]["creator"] is True + assert admin_creators[1]["affiliations"][0]["name"] == "Test" + assert admin_creators[1]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[1]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_creators[2]["given_name"] == "Editor Given Name here" + assert admin_creators[2]["family_name"] == "Family Name here" + assert admin_creators[2]["name_type"] == "Personal" + assert admin_creators[2]["name_identifier"] == "Name identifier" + assert admin_creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_creators[2]["creator"] is True + assert admin_creators[2]["affiliations"][0]["name"] == "Test" + assert admin_creators[2]["affiliations"][0]["identifier"] == "yes" + assert admin_creators[2]["affiliations"][0]["scheme"] == "uh" + assert admin_creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_creators[0]["given_name"] == "Given Name here" + assert editor_creators[0]["family_name"] == "Family Name here" + assert editor_creators[0]["name_type"] == "Personal" + assert editor_creators[0]["name_identifier"] == 
"Name identifier" + assert editor_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[0]["creator"] is True + assert editor_creators[0]["affiliations"][0]["name"] == "Test" + assert editor_creators[0]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[0]["affiliations"][0]["scheme"] == "uh" + assert editor_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_creators[1]["given_name"] == "Admin Given Name here" + assert editor_creators[1]["family_name"] == "Family Name here" + assert editor_creators[1]["name_type"] == "Personal" + assert editor_creators[1]["name_identifier"] == "Name identifier" + assert editor_creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[1]["creator"] is True + assert editor_creators[1]["affiliations"][0]["name"] == "Test" + assert editor_creators[1]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[1]["affiliations"][0]["scheme"] == "uh" + assert editor_creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_creators[2]["given_name"] == "Editor Given Name here" + assert editor_creators[2]["family_name"] == "Family Name here" + assert editor_creators[2]["name_type"] == "Personal" + assert editor_creators[2]["name_identifier"] == "Name identifier" + assert editor_creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_creators[2]["creator"] is True + assert editor_creators[2]["affiliations"][0]["name"] == "Test" + assert editor_creators[2]["affiliations"][0]["identifier"] == "yes" + assert editor_creators[2]["affiliations"][0]["scheme"] == "uh" + assert editor_creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert 
viewer_creators[0]["given_name"] == "Given Name here" + assert viewer_creators[0]["family_name"] == "Family Name here" + assert viewer_creators[0]["name_type"] == "Personal" + assert viewer_creators[0]["name_identifier"] == "Name identifier" + assert viewer_creators[0]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_creators[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_creators[0]["creator"] is True + assert viewer_creators[0]["affiliations"][0]["name"] == "Test" + assert viewer_creators[0]["affiliations"][0]["identifier"] == "yes" + assert viewer_creators[0]["affiliations"][0]["scheme"] == "uh" + assert viewer_creators[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert viewer_creators[1]["given_name"] == "Admin Given Name here" + assert viewer_creators[1]["family_name"] == "Family Name here" + assert viewer_creators[1]["name_type"] == "Personal" + assert viewer_creators[1]["name_identifier"] == "Name identifier" + assert viewer_creators[1]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_creators[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_creators[1]["creator"] is True + assert viewer_creators[1]["affiliations"][0]["name"] == "Test" + assert viewer_creators[1]["affiliations"][0]["identifier"] == "yes" + assert viewer_creators[1]["affiliations"][0]["scheme"] == "uh" + assert viewer_creators[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert viewer_creators[2]["given_name"] == "Editor Given Name here" + assert viewer_creators[2]["family_name"] == "Family Name here" + assert viewer_creators[2]["name_type"] == "Personal" + assert viewer_creators[2]["name_identifier"] == "Name identifier" + assert viewer_creators[2]["name_identifier_scheme"] == "Name Scheme ID" + assert viewer_creators[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert viewer_creators[2]["creator"] is True + assert viewer_creators[2]["affiliations"][0]["name"] == "Test" + assert 
viewer_creators[2]["affiliations"][0]["identifier"] == "yes" + assert viewer_creators[2]["affiliations"][0]["scheme"] == "uh" + assert viewer_creators[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert contributors[0]["given_name"] == "Given Name here" + assert contributors[0]["family_name"] == "Family Name here" + assert contributors[0]["name_type"] == "Personal" + assert contributors[0]["name_identifier"] == "Name identifier" + assert contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert contributors[0]["creator"] is False + assert contributors[0]["contributor_type"] == "Con Type" + assert contributors[0]["affiliations"][0]["name"] == "Test" + assert contributors[0]["affiliations"][0]["identifier"] == "yes" + assert contributors[0]["affiliations"][0]["scheme"] == "uh" + assert contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert contributors[1]["given_name"] == "Admin Given Name here" + assert contributors[1]["family_name"] == "Family Name here" + assert contributors[1]["name_type"] == "Personal" + assert contributors[1]["name_identifier"] == "Name identifier" + assert contributors[1]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert contributors[1]["creator"] is False + assert contributors[1]["contributor_type"] == "Con Type" + assert contributors[1]["affiliations"][0]["name"] == "Test" + assert contributors[1]["affiliations"][0]["identifier"] == "yes" + assert contributors[1]["affiliations"][0]["scheme"] == "uh" + assert contributors[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert contributors[2]["given_name"] == "Editor Given Name here" + assert contributors[2]["family_name"] == "Editor Family Name here" + assert contributors[2]["name_type"] == "Personal" + assert contributors[2]["name_identifier"] == "Name identifier" + assert 
contributors[2]["name_identifier_scheme"] == "Name Scheme ID" + assert contributors[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert contributors[2]["creator"] is False + assert contributors[2]["contributor_type"] == "Con Type" + assert contributors[2]["affiliations"][0]["name"] == "Test" + assert contributors[2]["affiliations"][0]["identifier"] == "yes" + assert contributors[2]["affiliations"][0]["scheme"] == "uh" + assert contributors[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert admin_contributors[0]["given_name"] == "Given Name here" + assert admin_contributors[0]["family_name"] == "Family Name here" + assert admin_contributors[0]["name_type"] == "Personal" + assert admin_contributors[0]["name_identifier"] == "Name identifier" + assert admin_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[0]["creator"] is False + assert admin_contributors[0]["contributor_type"] == "Con Type" + assert admin_contributors[0]["affiliations"][0]["name"] == "Test" + assert admin_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert admin_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_contributors[1]["given_name"] == "Admin Given Name here" + assert admin_contributors[1]["family_name"] == "Family Name here" + assert admin_contributors[1]["name_type"] == "Personal" + assert admin_contributors[1]["name_identifier"] == "Name identifier" + assert admin_contributors[1]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[1]["creator"] is False + assert admin_contributors[1]["contributor_type"] == "Con Type" + assert admin_contributors[1]["affiliations"][0]["name"] == "Test" + assert 
admin_contributors[1]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[1]["affiliations"][0]["scheme"] == "uh" + assert admin_contributors[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert admin_contributors[2]["given_name"] == "Editor Given Name here" + assert admin_contributors[2]["family_name"] == "Editor Family Name here" + assert admin_contributors[2]["name_type"] == "Personal" + assert admin_contributors[2]["name_identifier"] == "Name identifier" + assert admin_contributors[2]["name_identifier_scheme"] == "Name Scheme ID" + assert admin_contributors[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert admin_contributors[2]["creator"] is False + assert admin_contributors[2]["contributor_type"] == "Con Type" + assert admin_contributors[2]["affiliations"][0]["name"] == "Test" + assert admin_contributors[2]["affiliations"][0]["identifier"] == "yes" + assert admin_contributors[2]["affiliations"][0]["scheme"] == "uh" + assert admin_contributors[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert editor_contributors[0]["given_name"] == "Given Name here" + assert editor_contributors[0]["family_name"] == "Family Name here" + assert editor_contributors[0]["name_type"] == "Personal" + assert editor_contributors[0]["name_identifier"] == "Name identifier" + assert editor_contributors[0]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[0]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[0]["creator"] is False + assert editor_contributors[0]["contributor_type"] == "Con Type" + assert editor_contributors[0]["affiliations"][0]["name"] == "Test" + assert editor_contributors[0]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[0]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[0]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_contributors[1]["given_name"] == "Admin Given Name here" + assert 
editor_contributors[1]["family_name"] == "Family Name here" + assert editor_contributors[1]["name_type"] == "Personal" + assert editor_contributors[1]["name_identifier"] == "Name identifier" + assert editor_contributors[1]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[1]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[1]["creator"] is False + assert editor_contributors[1]["contributor_type"] == "Con Type" + assert editor_contributors[1]["affiliations"][0]["name"] == "Test" + assert editor_contributors[1]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[1]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[1]["affiliations"][0]["scheme_uri"] == "scheme uri" + assert editor_contributors[2]["given_name"] == "Editor Given Name here" + assert editor_contributors[2]["family_name"] == "Editor Family Name here" + assert editor_contributors[2]["name_type"] == "Personal" + assert editor_contributors[2]["name_identifier"] == "Name identifier" + assert editor_contributors[2]["name_identifier_scheme"] == "Name Scheme ID" + assert editor_contributors[2]["name_identifier_scheme_uri"] == "Name ID Scheme URI" + assert editor_contributors[2]["creator"] is False + assert editor_contributors[2]["contributor_type"] == "Con Type" + assert editor_contributors[2]["affiliations"][0]["name"] == "Test" + assert editor_contributors[2]["affiliations"][0]["identifier"] == "yes" + assert editor_contributors[2]["affiliations"][0]["scheme"] == "uh" + assert editor_contributors[2]["affiliations"][0]["scheme_uri"] == "scheme uri" + + assert managing_organization["name"] == "editor Managing Organization Name" + assert managing_organization["identifier"] == "identifier" + assert managing_organization["identifier_scheme"] == "identifier scheme" + assert managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" + + assert admin_managing_organization["name"] == "editor Managing 
Organization Name" + assert admin_managing_organization["identifier"] == "identifier" + assert admin_managing_organization["identifier_scheme"] == "identifier scheme" assert ( - admin_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' + admin_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + assert editor_managing_organization["name"] == "editor Managing Organization Name" + assert editor_managing_organization["identifier"] == "identifier" + assert editor_managing_organization["identifier_scheme"] == "identifier scheme" + assert ( + editor_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) + + assert viewer_managing_organization["name"] == "editor Managing Organization Name" + assert viewer_managing_organization["identifier"] == "identifier" + assert viewer_managing_organization["identifier_scheme"] == "identifier scheme" assert ( - editor_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' + viewer_managing_organization["identifier_scheme_uri"] == "identifier scheme_uri" ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + +# ------------------- DELETE CONTRIBUTOR METADATA ------------------- # +def test_delete_dataset_contributor_metadata(clients): + """ + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/contributor' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset contributor metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id + 
contributor_id = pytest.global_dataset_contributor_id + admin_contributor_id = pytest.global_dataset_contributor_id_admin + editor_contributor_id = pytest.global_dataset_contributor_id_editor + + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["motivation"] - == '[{"id":1,"question":"For","response":"new"}]' + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{contributor_id}" + ) + # pylint: disable=line-too-long + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{admin_contributor_id}" + ) + # pylint: disable=line-too-long + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor/{editor_contributor_id}" ) - # Editor was the last successful PUT request, so the response data should match + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -# # ------------------- DATASET HEALTHSHEET COMPOSITION METADATA ------------------- # -def test_put_healthsheet_composition_dataset_metadata(clients): +# ------------------- DELETE CREATOR METADATA ------------------- # +def test_delete_dataset_creator_metadata(clients): """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/composition' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet composition metadata content + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/creator' + endpoint is requested (DELETE) + Then 
check that the response is valid and deletes the dataset creator metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + creator_id = pytest.global_dataset_creator_id + admin_creator_id = pytest.global_dataset_creator_id_admin + editor_creator_id = pytest.global_dataset_creator_id_editor - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + # Verify Viewer cannot delete + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{creator_id}" ) - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{admin_creator_id}" ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' + editor_response = _editor_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/creator/{editor_creator_id}" ) - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", - json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, - ) assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -def test_get_dataset_healthsheet_composition_metadata(clients): +# ------------------- DELETE DATASET FUNDER METADATA ------------------- # +def test_delete_dataset_funder_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/composition' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - description metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + endpoint is requested (DELETE) + Then check that the response is valid and deletes the dataset + funder metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + funder_id = pytest.global_dataset_funder_id + a_funder_id = pytest.global_dataset_funder_id_admin + e_funder_id = pytest.global_dataset_funder_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["composition"] - == 
'[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{a_funder_id}" ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["composition"] - == '[{"id":1,"question":"For","response":"new"}]' + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{e_funder_id}" ) - # Editor was the last successful PUT request, so the response data should match + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -# ------------------- DATASET HEALTHSHEET COLLECTION METADATA ------------------- # -def test_put_healthsheet_collection_dataset_metadata(clients): +# ------------------- OTHER METADATA ------------------- # +def test_put_other_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/collection' + When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (PUT) Then check that the response is valid and updates the dataset - healthsheet collection metadata content + other metadata 
content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + "size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, ) + assert response.status_code == 200 response_data = json.loads(response.data) - assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' + + assert response_data["acknowledgement"] == "Yes" + assert response_data["language"] == "English" + + assert response_data["size"] == ["Size"] + assert response_data["format"] == ["Format"] + assert response_data["standards_followed"] == "Standards Followed" admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + "size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, ) + assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) + + assert admin_response_data["acknowledgement"] == "Yes" + assert admin_response_data["language"] == "English" + assert admin_response_data["size"] == ["Size"] + assert admin_response_data["format"] == ["Format"] + assert admin_response_data["standards_followed"] == "Standards Followed" editor_response = _editor_client.put( - 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + "size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, ) + assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) + + assert editor_response_data["acknowledgement"] == "Yes" + assert editor_response_data["language"] == "English" + assert editor_response_data["size"] == ["Size"] + assert editor_response_data["format"] == ["Format"] + assert editor_response_data["standards_followed"] == "Standards Followed" viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", - json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + f"/study/{study_id}/dataset/{dataset_id}/metadata/other", + json={ + "acknowledgement": "Yes", + "language": "English", + "resource_type": "Resource Type", + "size": ["Size"], + "format": ["Format"], + "standards_followed": "Standards Followed", + }, ) assert viewer_response.status_code == 403 -def test_get_dataset_healthsheet_collection_metadata(clients): +def test_get_other_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/collection' + When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - description metadata content + other metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = 
pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" ) + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + ) + + assert response.status_code == 200 + assert admin_response.status_code == 200 assert editor_response.status_code == 200 + # assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) + viewer_response_data = json.loads(viewer_response.data) + + # Editor was the last to update the metadata successfully so + # the response should reflect that + assert response_data["acknowledgement"] == "Yes" + assert response_data["language"] == "English" + # assert response_data["resource_type"] == "Editor Resource Type" + assert response_data["size"] == ["Size"] + assert response_data["format"] == ["Format"] + assert response_data["standards_followed"] == "Standards Followed" + + assert admin_response_data["acknowledgement"] == "Yes" + assert 
admin_response_data["language"] == "English" + # assert admin_response_data["resource_type"] == "Editor Resource Type" + assert admin_response_data["size"] == ["Size"] + assert admin_response_data["format"] == ["Format"] + assert admin_response_data["standards_followed"] == "Standards Followed" - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["collection"] - == '[{"id":1,"question":"For","response":"new"}]' - ) + assert editor_response_data["acknowledgement"] == "Yes" + assert editor_response_data["language"] == "English" + # assert editor_response_data["resource_type"] == "Editor Resource Type" + assert editor_response_data["size"] == ["Size"] + assert editor_response_data["format"] == ["Format"] + assert editor_response_data["standards_followed"] == "Standards Followed" - # Editor was the last successful PUT request, so the response data should match + assert viewer_response_data["acknowledgement"] == "Yes" + assert viewer_response_data["language"] == "English" + assert viewer_response_data["size"] == ["Size"] + assert viewer_response_data["format"] == ["Format"] + assert viewer_response_data["standards_followed"] == "Standards Followed" -# ------------------- DATASET HEALTHSHEET PREPROCESSING METADATA ------------------- # -def test_put_healthsheet_preprocessing_dataset_metadata(clients): +# ------------------- RELATED IDENTIFIER METADATA ------------------- # +def test_post_dataset_related_identifier_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet preprocessing metadata content + When the 
'/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier' + endpoint is requested (POST) + Then check that the response is valid and creates the dataset + related identifier metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + json=[ + { + "identifier": "test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test", + } + ], ) - assert response.status_code == 200 + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert response.status_code == 201 response_data = json.loads(response.data) - assert ( - response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' - ) - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + pytest.global_dataset_related_identifier_id = response_data[0]["id"] + + assert response_data[0]["identifier"] == "test identifier" + assert response_data[0]["identifier_type"] == "test identifier type" + assert response_data[0]["relation_type"] == "test relation type" + assert response_data[0]["related_metadata_scheme"] == "test" + assert response_data[0]["scheme_uri"] == "test" + assert response_data[0]["scheme_type"] == "test" + assert response_data[0]["resource_type"] == "test" + + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + 
json=[ + { + "identifier": "admin test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test", + } + ], ) - assert admin_response.status_code == 200 + # Add a one second delay to prevent duplicate timestamps + sleep(1) + + assert admin_response.status_code == 201 admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' - ) + pytest.global_dataset_related_identifier_id_admin = admin_response_data[0]["id"] - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + assert admin_response_data[0]["identifier"] == "admin test identifier" + assert admin_response_data[0]["identifier_type"] == "test identifier type" + assert admin_response_data[0]["relation_type"] == "test relation type" + assert admin_response_data[0]["related_metadata_scheme"] == "test" + assert admin_response_data[0]["scheme_uri"] == "test" + assert admin_response_data[0]["scheme_type"] == "test" + assert admin_response_data[0]["resource_type"] == "test" + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + json=[ + { + "identifier": "editor test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test", + } + ], ) - assert editor_response.status_code == 200 + + assert editor_response.status_code == 201 editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' - ) + pytest.global_dataset_related_identifier_id_editor = 
editor_response_data[0]["id"] - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", - json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + assert editor_response_data[0]["identifier"] == "editor test identifier" + assert editor_response_data[0]["identifier_type"] == "test identifier type" + assert editor_response_data[0]["relation_type"] == "test relation type" + assert editor_response_data[0]["related_metadata_scheme"] == "test" + assert editor_response_data[0]["scheme_uri"] == "test" + assert editor_response_data[0]["scheme_type"] == "test" + assert editor_response_data[0]["resource_type"] == "test" + viewer_client = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", + json=[ + { + "identifier": "viewer test identifier", + "identifier_type": "test identifier type", + "relation_type": "test relation type", + "related_metadata_scheme": "test", + "scheme_uri": "test", + "scheme_type": "test", + "resource_type": "test", + } + ], ) - assert viewer_response.status_code == 403 + + assert viewer_client.status_code == 403 -def test_get_dataset_healthsheet_preprocessing_metadata(clients): +def test_get_dataset_related_identifier_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' + When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - description metadata content + related identifier metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert response.status_code == 200 - response_data = 
json.loads(response.data) - assert ( - response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["preprocessing"] - == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" ) - # Editor was the last successful PUT request, so the response data should match + assert response.status_code == 200 + assert admin_response.status_code == 200 + assert editor_response.status_code == 200 + assert viewer_response.status_code == 200 + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) -# # ------------------- DATASET HEALTHSHEET USES METADATA ------------------- # -def test_put_healthsheet_uses_dataset_metadata(clients): - """ - Given a Flask 
application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/uses' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet uses metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id + # seach for main title and subtitle index in response_data[n]["titles"] + # pylint: disable=line-too-long - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + # assert len(response_data) == 3 + # assert len(admin_response_data) == 3 + # assert len(editor_response_data) == 3 + # assert len(viewer_response_data) == 3 + assert response_data[0]["identifier"] == "test identifier" + assert response_data[0]["identifier_type"] == "test identifier type" + assert response_data[0]["relation_type"] == "test relation type" + assert response_data[0]["related_metadata_scheme"] == "test" + assert response_data[0]["scheme_uri"] == "test" + assert response_data[0]["scheme_type"] == "test" + assert response_data[0]["resource_type"] == "test" + assert response_data[1]["identifier"] == "admin test identifier" + assert response_data[1]["identifier_type"] == "test identifier type" + assert response_data[1]["relation_type"] == "test relation type" + assert response_data[1]["related_metadata_scheme"] == "test" + assert response_data[1]["scheme_uri"] == "test" + assert response_data[1]["scheme_type"] == "test" + assert response_data[1]["resource_type"] == "test" + assert response_data[2]["identifier"] == "editor test identifier" + assert response_data[2]["identifier_type"] == "test identifier 
type" + assert response_data[2]["relation_type"] == "test relation type" + assert response_data[2]["related_metadata_scheme"] == "test" + assert response_data[2]["scheme_uri"] == "test" + assert response_data[2]["scheme_type"] == "test" + assert response_data[2]["resource_type"] == "test" - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + assert admin_response_data[0]["identifier"] == "test identifier" + assert admin_response_data[0]["identifier_type"] == "test identifier type" + assert admin_response_data[0]["relation_type"] == "test relation type" + assert admin_response_data[0]["related_metadata_scheme"] == "test" + assert admin_response_data[0]["scheme_uri"] == "test" + assert admin_response_data[0]["scheme_type"] == "test" + assert admin_response_data[0]["resource_type"] == "test" + assert admin_response_data[1]["identifier"] == "admin test identifier" + assert admin_response_data[1]["identifier_type"] == "test identifier type" + assert admin_response_data[1]["relation_type"] == "test relation type" + assert admin_response_data[1]["related_metadata_scheme"] == "test" + assert admin_response_data[1]["scheme_uri"] == "test" + assert admin_response_data[1]["scheme_type"] == "test" + assert admin_response_data[1]["resource_type"] == "test" + assert admin_response_data[2]["identifier"] == "editor test identifier" + assert admin_response_data[2]["identifier_type"] == "test identifier type" + assert admin_response_data[2]["relation_type"] == "test relation type" + assert admin_response_data[2]["related_metadata_scheme"] == "test" + assert admin_response_data[2]["scheme_uri"] == "test" + assert admin_response_data[2]["scheme_type"] == "test" + assert 
admin_response_data[2]["resource_type"] == "test" - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - ) + assert editor_response_data[0]["identifier"] == "test identifier" + assert editor_response_data[0]["identifier_type"] == "test identifier type" + assert editor_response_data[0]["relation_type"] == "test relation type" + assert editor_response_data[0]["related_metadata_scheme"] == "test" + assert editor_response_data[0]["scheme_uri"] == "test" + assert editor_response_data[0]["scheme_type"] == "test" + assert editor_response_data[0]["resource_type"] == "test" + assert editor_response_data[1]["identifier"] == "admin test identifier" + assert editor_response_data[1]["identifier_type"] == "test identifier type" + assert editor_response_data[1]["relation_type"] == "test relation type" + assert editor_response_data[1]["related_metadata_scheme"] == "test" + assert editor_response_data[1]["scheme_uri"] == "test" + assert editor_response_data[1]["scheme_type"] == "test" + assert editor_response_data[1]["resource_type"] == "test" + assert editor_response_data[2]["identifier"] == "editor test identifier" + assert editor_response_data[2]["identifier_type"] == "test identifier type" + assert editor_response_data[2]["relation_type"] == "test relation type" + assert editor_response_data[2]["related_metadata_scheme"] == "test" + assert editor_response_data[2]["scheme_uri"] == "test" + assert editor_response_data[2]["scheme_type"] == "test" + assert editor_response_data[2]["resource_type"] == "test" - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", - json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, - ) - 
assert viewer_response.status_code == 403 + assert viewer_response_data[0]["identifier"] == "test identifier" + assert viewer_response_data[0]["identifier_type"] == "test identifier type" + assert viewer_response_data[0]["relation_type"] == "test relation type" + assert viewer_response_data[0]["related_metadata_scheme"] == "test" + assert viewer_response_data[0]["scheme_uri"] == "test" + assert viewer_response_data[0]["scheme_type"] == "test" + assert viewer_response_data[0]["resource_type"] == "test" + assert viewer_response_data[1]["identifier"] == "admin test identifier" + assert viewer_response_data[1]["identifier_type"] == "test identifier type" + assert viewer_response_data[1]["relation_type"] == "test relation type" + assert viewer_response_data[1]["related_metadata_scheme"] == "test" + assert viewer_response_data[1]["scheme_uri"] == "test" + assert viewer_response_data[1]["scheme_type"] == "test" + assert viewer_response_data[1]["resource_type"] == "test" + assert viewer_response_data[2]["identifier"] == "editor test identifier" + assert viewer_response_data[2]["identifier_type"] == "test identifier type" + assert viewer_response_data[2]["relation_type"] == "test relation type" + assert viewer_response_data[2]["related_metadata_scheme"] == "test" + assert viewer_response_data[2]["scheme_uri"] == "test" + assert viewer_response_data[2]["scheme_type"] == "test" + assert viewer_response_data[2]["resource_type"] == "test" -def test_get_dataset_healthsheet_uses_metadata(clients): +def test_delete_dataset_related_identifier_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/uses' - endpoint is requested (GET) + When the '/study/{study_id}/dataset/{dataset_id}' + endpoint is requested (DELETE) Then check that the response is valid and retrieves the dataset - description metadata content + related identifier metadata content """ _logged_in_client, 
_admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + identifier_id = pytest.global_dataset_related_identifier_id + a_identifier_id = pytest.global_dataset_related_identifier_id_admin + e_identifier_id = pytest.global_dataset_related_identifier_id_editor - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" ) - - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{a_identifier_id}" ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + editor_response = _editor_client.delete( + 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{e_identifier_id}" ) - # Editor was the last successful PUT request, so the response data should match + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -# ------------------- DATASET HEALTHSHEET DISTRIBUTION METADATA ------------------- # -def test_put_healthsheet_distribution_dataset_metadata(clients): +# ------------------- DATA MANAGEMENT METADATA ------------------- # +def test_post_dataset_data_management_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/distribution' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet distribution metadata content + When the '/study/{study_id}/dataset/{dataset_id}/metadata/data-management' endpoint is requested (PUT) + Then check that the response is valid and updates the data management metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Scheme", + 
"scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Value URI", + } + ], + }, ) + assert response.status_code == 200 response_data = json.loads(response.data) - assert ( - response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' - ) + consent = response_data["consent"] + deident = response_data["deident"] + subjects = response_data["subjects"] + pytest.global_dataset_subject_id = subjects[0]["id"] + + assert subjects[0]["scheme"] == "Scheme" + assert subjects[0]["scheme_uri"] == "Scheme URI" + assert subjects[0]["subject"] == "Subject" + assert subjects[0]["value_uri"] == "Value URI" + assert subjects[0]["classification_code"] == "Classification Code" + + assert consent["type"] == "test" + assert consent["noncommercial"] is True + assert consent["geog_restrict"] is True + assert consent["research_type"] is True + assert consent["genetic_only"] is True + assert consent["no_methods"] is True + assert consent["details"] == "test" + + assert deident["type"] == "Level" + assert deident["direct"] is True + assert deident["hipaa"] is True + assert deident["dates"] is True + assert deident["nonarr"] is True + assert deident["k_anon"] is True + assert deident["details"] == "Details" - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + admin_response = _admin_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "admin test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "admin details test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Admin Scheme", + "scheme_uri": 
"Scheme URI", + "subject": "Subject", + "value_uri": "Admin Value URI", + } + ], + }, ) + assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + admin_consent = admin_response_data["consent"] + admin_deident = admin_response_data["deident"] + admin_subjects = admin_response_data["subjects"] + + pytest.global_dataset_subject_id_admin = admin_subjects[0]["id"] + + assert admin_subjects[0]["scheme"] == "Admin Scheme" + assert admin_subjects[0]["scheme_uri"] == "Scheme URI" + assert admin_subjects[0]["subject"] == "Subject" + assert admin_subjects[0]["value_uri"] == "Admin Value URI" + assert admin_subjects[0]["classification_code"] == "Classification Code" + + assert admin_consent["type"] == "admin test" + assert admin_consent["details"] == "admin details test" + assert admin_consent["noncommercial"] is True + assert admin_consent["geog_restrict"] is True + assert admin_consent["research_type"] is True + assert admin_consent["genetic_only"] is True + assert admin_consent["no_methods"] is True + + assert admin_deident["type"] == "Level" + assert admin_deident["direct"] is True + assert admin_deident["hipaa"] is True + assert admin_deident["dates"] is True + assert admin_deident["nonarr"] is True + assert admin_deident["k_anon"] is True + assert admin_deident["details"] == "Details" + + editor_response = _editor_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "editor test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "editor details test", + }, + "deident": { + "type": "Level", + "direct": 
True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Editor Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Editor Value URI", + } + ], + }, ) + assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", - json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + editor_consent = editor_response_data["consent"] + editor_deident = editor_response_data["deident"] + editor_subjects = editor_response_data["subjects"] + pytest.global_dataset_subject_id_editor = editor_subjects[0]["id"] + + assert editor_subjects[0]["scheme"] == "Editor Scheme" + assert editor_subjects[0]["scheme_uri"] == "Scheme URI" + assert editor_subjects[0]["subject"] == "Subject" + assert editor_subjects[0]["value_uri"] == "Editor Value URI" + assert editor_subjects[0]["classification_code"] == "Classification Code" + + assert editor_consent["type"] == "editor test" + assert editor_consent["details"] == "editor details test" + assert editor_consent["noncommercial"] is True + assert editor_consent["geog_restrict"] is True + assert editor_consent["research_type"] is True + assert editor_consent["genetic_only"] is True + assert editor_consent["no_methods"] is True + + assert editor_deident["type"] == "Level" + assert editor_deident["direct"] is True + assert editor_deident["hipaa"] is True + assert editor_deident["dates"] is True + assert editor_deident["nonarr"] is True + assert editor_deident["k_anon"] is True + assert editor_deident["details"] == "Details" + + viewer_response = _viewer_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + 
json={ + "consent": { + "type": "viewer test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "viewer details test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Viewer Scheme", + "scheme_uri": "Scheme URI", + "subject": "Subject", + "value_uri": "Viewer Value URI", + } + ], + }, ) + assert viewer_response.status_code == 403 -def test_get_dataset_healthsheet_distribution_metadata(clients): +def test_get_dataset_data_management_metadata(clients): """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - distribution metadata content + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/data-management' endpoint is requested (GET) + Then check that the response is valid and retrieves the data management metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert admin_response.status_code == 200 - admin_response_data = 
json.loads(admin_response.data) - assert ( - admin_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" - ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["distribution"] - == '[{"id":1,"question":"For","response":"new"}]' + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management" ) - # Editor was the last successful PUT request, so the response data should match - - -# ------------------- DATASET HEALTHSHEET MAINTENANCE METADATA ------------------- # -def test_put_healthsheet_maintenance_dataset_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/healthsheet/maintenance' - endpoint is requested (PUT) - Then check that the response is valid and updates the dataset - healthsheet maintenance metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - - response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, - ) assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["maintenance"] == 
'[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, - ) assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, - ) assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) + assert viewer_response.status_code == 200 + + response_data = json.loads(response.data) + admin_response_data = json.loads(admin_response.data) + editor_response_data = json.loads(editor_response.data) + viewer_response_data = json.loads(viewer_response.data) - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", - json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, - ) - assert viewer_response.status_code == 403 + consent = response_data["consent"] + deident = response_data["deident"] + subjects = response_data["subjects"] + + admin_consent = admin_response_data["consent"] + admin_deident = admin_response_data["deident"] + admin_subjects = admin_response_data["subjects"] + + editor_consent = editor_response_data["consent"] + editor_deident = editor_response_data["deident"] + editor_subjects = editor_response_data["subjects"] + + viewer_consent = viewer_response_data["consent"] + viewer_deident = viewer_response_data["deident"] + viewer_subjects = viewer_response_data["subjects"] + + assert consent["type"] == "editor test" + assert consent["noncommercial"] is True + assert 
consent["geog_restrict"] is True + assert consent["research_type"] is True + assert consent["genetic_only"] is True + assert consent["no_methods"] is True + assert consent["details"] == "editor details test" + assert admin_consent["type"] == "editor test" + assert admin_consent["noncommercial"] is True + assert admin_consent["geog_restrict"] is True + assert admin_consent["research_type"] is True + assert admin_consent["genetic_only"] is True + assert admin_consent["no_methods"] is True + assert admin_consent["details"] == "editor details test" + assert editor_consent["type"] == "editor test" + assert editor_consent["noncommercial"] is True + assert editor_consent["geog_restrict"] is True + assert editor_consent["research_type"] is True + assert editor_consent["genetic_only"] is True + assert editor_consent["no_methods"] is True + assert editor_consent["details"] == "editor details test" + assert viewer_consent["type"] == "editor test" + assert viewer_consent["noncommercial"] is True + assert viewer_consent["geog_restrict"] is True + assert viewer_consent["research_type"] is True + assert viewer_consent["genetic_only"] is True + assert viewer_consent["no_methods"] is True + assert viewer_consent["details"] == "editor details test" + + assert subjects[0]["scheme"] == "Scheme" + assert subjects[0]["scheme_uri"] == "Scheme URI" + assert subjects[0]["subject"] == "Subject" + assert subjects[0]["value_uri"] == "Value URI" + assert subjects[0]["classification_code"] == "Classification Code" + assert subjects[1]["scheme"] == "Admin Scheme" + assert subjects[1]["scheme_uri"] == "Scheme URI" + assert subjects[1]["subject"] == "Subject" + assert subjects[1]["value_uri"] == "Admin Value URI" + assert subjects[1]["classification_code"] == "Classification Code" + assert subjects[2]["scheme"] == "Editor Scheme" + assert subjects[2]["scheme_uri"] == "Scheme URI" + assert subjects[2]["subject"] == "Subject" + assert subjects[2]["value_uri"] == "Editor Value URI" + assert 
subjects[2]["classification_code"] == "Classification Code" + + assert admin_subjects[0]["scheme"] == "Scheme" + assert admin_subjects[0]["scheme_uri"] == "Scheme URI" + assert admin_subjects[0]["subject"] == "Subject" + assert admin_subjects[0]["value_uri"] == "Value URI" + assert admin_subjects[0]["classification_code"] == "Classification Code" + assert admin_subjects[1]["scheme"] == "Admin Scheme" + assert admin_subjects[1]["scheme_uri"] == "Scheme URI" + assert admin_subjects[1]["subject"] == "Subject" + assert admin_subjects[1]["value_uri"] == "Admin Value URI" + assert admin_subjects[1]["classification_code"] == "Classification Code" + assert admin_subjects[2]["scheme"] == "Editor Scheme" + assert admin_subjects[2]["scheme_uri"] == "Scheme URI" + assert admin_subjects[2]["subject"] == "Subject" + assert admin_subjects[2]["value_uri"] == "Editor Value URI" + assert admin_subjects[2]["classification_code"] == "Classification Code" + + assert editor_subjects[0]["scheme"] == "Scheme" + assert editor_subjects[0]["scheme_uri"] == "Scheme URI" + assert editor_subjects[0]["subject"] == "Subject" + assert editor_subjects[0]["value_uri"] == "Value URI" + assert editor_subjects[0]["classification_code"] == "Classification Code" + assert editor_subjects[1]["scheme"] == "Admin Scheme" + assert editor_subjects[1]["scheme_uri"] == "Scheme URI" + assert editor_subjects[1]["subject"] == "Subject" + assert editor_subjects[1]["value_uri"] == "Admin Value URI" + assert editor_subjects[1]["classification_code"] == "Classification Code" + assert editor_subjects[2]["scheme"] == "Editor Scheme" + assert editor_subjects[2]["scheme_uri"] == "Scheme URI" + assert editor_subjects[2]["subject"] == "Subject" + assert editor_subjects[2]["value_uri"] == "Editor Value URI" + assert editor_subjects[2]["classification_code"] == "Classification Code" + + assert viewer_subjects[0]["scheme"] == "Scheme" + assert viewer_subjects[0]["scheme_uri"] == "Scheme URI" + assert 
viewer_subjects[0]["subject"] == "Subject" + assert viewer_subjects[0]["value_uri"] == "Value URI" + assert viewer_subjects[0]["classification_code"] == "Classification Code" + assert viewer_subjects[1]["scheme"] == "Admin Scheme" + assert viewer_subjects[1]["scheme_uri"] == "Scheme URI" + assert viewer_subjects[1]["subject"] == "Subject" + assert viewer_subjects[1]["value_uri"] == "Admin Value URI" + assert viewer_subjects[1]["classification_code"] == "Classification Code" + assert viewer_subjects[2]["scheme"] == "Editor Scheme" + assert viewer_subjects[2]["scheme_uri"] == "Scheme URI" + assert viewer_subjects[2]["subject"] == "Subject" + assert viewer_subjects[2]["value_uri"] == "Editor Value URI" + assert viewer_subjects[2]["classification_code"] == "Classification Code" + + assert deident["type"] == "Level" + assert deident["direct"] is True + assert deident["hipaa"] is True + assert deident["dates"] is True + assert deident["nonarr"] is True + assert deident["k_anon"] is True + assert deident["details"] == "Details" + assert admin_deident["type"] == "Level" + assert admin_deident["direct"] is True + assert admin_deident["hipaa"] is True + assert admin_deident["dates"] is True + assert admin_deident["nonarr"] is True + assert admin_deident["k_anon"] is True + assert admin_deident["details"] == "Details" + assert editor_deident["type"] == "Level" + assert editor_deident["direct"] is True + assert editor_deident["hipaa"] is True + assert editor_deident["dates"] is True + assert editor_deident["nonarr"] is True + assert editor_deident["k_anon"] is True + assert editor_deident["details"] == "Details" + assert viewer_deident["type"] == "Level" + assert viewer_deident["direct"] is True + assert viewer_deident["hipaa"] is True + assert viewer_deident["dates"] is True + assert viewer_deident["nonarr"] is True + assert viewer_deident["k_anon"] is True + assert viewer_deident["details"] == "Details" -def test_get_dataset_healthsheet_maintenance_metadata(clients): +def 
test_delete_dataset_subject_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance' + When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - maintenance metadata content + subjects metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id + subject_id = pytest.global_dataset_subject_id + admin_sub_id = pytest.global_dataset_subject_id_admin + editor_sub_id = pytest.global_dataset_subject_id_editor - response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" - ) - assert response.status_code == 200 - response_data = json.loads(response.data) - assert ( - response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' - ) - - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" - ) - assert admin_response.status_code == 200 - admin_response_data = json.loads(admin_response.data) - assert ( - admin_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' - ) - - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + viewer_response = _viewer_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - assert ( - editor_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' + response = _logged_in_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" ) - - viewer_response = _viewer_client.get( - 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" + admin_response = _admin_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{admin_sub_id}" ) - assert viewer_response.status_code == 200 - viewer_response_data = json.loads(viewer_response.data) - assert ( - viewer_response_data["maintenance"] - == '[{"id":1,"question":"For","response":"new"}]' + editor_response = _editor_client.delete( + f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{editor_sub_id}" ) - # Editor was the last successful PUT request, so the response data should match + assert viewer_response.status_code == 403 + assert response.status_code == 204 + assert admin_response.status_code == 204 + assert editor_response.status_code == 204 -# ------------------- DATASET FUNDER METADATA ------------------- # -def test_post_dataset_funder_metadata(clients): +# ------------------- ALTERNATIVE IDENTIFIER METADATA ------------------- # +def test_post_alternative_identifier(clients): """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' endpoint is requested (POST) - Then check that the response is valid and creates the dataset - funder metadata content + Then check that the response is valid and creates the dataset alternative identifier """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", json=[ { - "name": "Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": 
"Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", + "identifier": "identifier test", + "type": "ARK", } ], ) @@ -2445,115 +2834,78 @@ def test_post_dataset_funder_metadata(clients): assert response.status_code == 201 response_data = json.loads(response.data) - pytest.global_dataset_funder_id = response_data[0]["id"] + pytest.global_alternative_identifier_id = response_data[0]["id"] - assert response_data[0]["name"] == "Name" - assert response_data[0]["award_number"] == "award number" - assert response_data[0]["award_title"] == "Award Title" - assert response_data[0]["award_uri"] == "Award URI" - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[0]["identifier_type"] == "Identifier Type" + assert response_data[0]["identifier"] == "identifier test" + assert response_data[0]["type"] == "ARK" admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", json=[ { - "name": "Admin Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", + "identifier": "admin test", + "type": "ARK", } ], ) # Add a one second delay to prevent duplicate timestamps sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_funder_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["name"] == "Admin Name" - assert admin_response_data[0]["award_number"] == "award number" - assert admin_response_data[0]["award_title"] == "Award Title" - assert admin_response_data[0]["award_uri"] == "Award URI" - assert admin_response_data[0]["identifier"] == "Identifier" - assert 
admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert admin_response_data[0]["identifier_type"] == "Identifier Type" - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", json=[ { - "name": "Editor Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", + "identifier": "editor test", + "type": "ARK", } ], ) - - assert editor_response.status_code == 201 - editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_funder_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["name"] == "Editor Name" - assert editor_response_data[0]["award_number"] == "award number" - assert editor_response_data[0]["award_title"] == "Award Title" - assert editor_response_data[0]["award_uri"] == "Award URI" - assert editor_response_data[0]["identifier"] == "Identifier" - assert ( - editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - ) # pylint: disable=line-too-long - assert editor_response_data[0]["identifier_type"] == "Identifier Type" - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", json=[ { - "name": "Viewer Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", + "identifier": "viewer test", + "type": "ARK", } ], ) + assert admin_response.status_code == 201 + assert editor_response.status_code == 201 assert viewer_response.status_code == 403 + admin_response_data = json.loads(admin_response.data) + 
editor_response_data = json.loads(editor_response.data) + pytest.global_alternative_identifier_id_admin = admin_response_data[0]["id"] + pytest.global_alternative_identifier_id_editor = editor_response_data[0]["id"] + + assert admin_response_data[0]["identifier"] == "admin test" + assert admin_response_data[0]["type"] == "ARK" + assert editor_response_data[0]["identifier"] == "editor test" + assert editor_response_data[0]["type"] == "ARK" + -def test_get_dataset_funder_metadata(clients): +def test_get_alternative_identifier(clients): """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - funder metadata content + Then check that the response is valid and retrieves the dataset alternative identifier content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" ) admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" ) editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" ) viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier" ) assert response.status_code == 200 @@ -2566,126 +2918,63 @@ def 
test_get_dataset_funder_metadata(clients): editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert len(response_data) == 3 - assert len(admin_response_data) == 3 - assert len(editor_response_data) == 3 - assert len(viewer_response_data) == 3 - - assert response_data[0]["name"] == "Name" - assert response_data[0]["award_number"] == "award number" - assert response_data[0]["award_title"] == "Award Title" - assert response_data[0]["award_uri"] == "Award URI" - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[0]["identifier_type"] == "Identifier Type" - assert response_data[1]["name"] == "Admin Name" - assert response_data[1]["award_number"] == "award number" - assert response_data[1]["award_title"] == "Award Title" - assert response_data[1]["award_uri"] == "Award URI" - assert response_data[1]["identifier"] == "Identifier" - assert response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[1]["identifier_type"] == "Identifier Type" - assert response_data[2]["name"] == "Editor Name" - assert response_data[2]["award_number"] == "award number" - assert response_data[2]["award_title"] == "Award Title" - assert response_data[2]["award_uri"] == "Award URI" - assert response_data[2]["identifier"] == "Identifier" - assert response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert response_data[2]["identifier_type"] == "Identifier Type" - - assert admin_response_data[0]["name"] == "Name" - assert admin_response_data[0]["award_number"] == "award number" - assert admin_response_data[0]["award_title"] == "Award Title" - assert admin_response_data[0]["award_uri"] == "Award URI" - assert admin_response_data[0]["identifier"] == "Identifier" - assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert 
admin_response_data[0]["identifier_type"] == "Identifier Type" - assert admin_response_data[1]["name"] == "Admin Name" - assert admin_response_data[1]["award_number"] == "award number" - assert admin_response_data[1]["award_title"] == "Award Title" - assert admin_response_data[1]["award_uri"] == "Award URI" - assert admin_response_data[1]["identifier"] == "Identifier" - assert admin_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert admin_response_data[1]["identifier_type"] == "Identifier Type" - assert admin_response_data[2]["name"] == "Editor Name" - assert admin_response_data[2]["award_number"] == "award number" - assert admin_response_data[2]["award_title"] == "Award Title" - assert admin_response_data[2]["award_uri"] == "Award URI" - assert admin_response_data[2]["identifier"] == "Identifier" - assert admin_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert admin_response_data[2]["identifier_type"] == "Identifier Type" - - assert editor_response_data[0]["name"] == "Name" - assert editor_response_data[0]["award_number"] == "award number" - assert editor_response_data[0]["award_title"] == "Award Title" - assert editor_response_data[0]["award_uri"] == "Award URI" - assert editor_response_data[0]["identifier"] == "Identifier" - assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert editor_response_data[0]["identifier_type"] == "Identifier Type" - assert editor_response_data[1]["name"] == "Admin Name" - assert editor_response_data[1]["award_number"] == "award number" - assert editor_response_data[1]["award_title"] == "Award Title" - assert editor_response_data[1]["award_uri"] == "Award URI" - assert editor_response_data[1]["identifier"] == "Identifier" - assert editor_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert editor_response_data[1]["identifier_type"] == "Identifier Type" - assert editor_response_data[2]["name"] == "Editor Name" - assert 
editor_response_data[2]["award_number"] == "award number" - assert editor_response_data[2]["award_title"] == "Award Title" - assert editor_response_data[2]["award_uri"] == "Award URI" - assert editor_response_data[2]["identifier"] == "Identifier" - assert editor_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert editor_response_data[2]["identifier_type"] == "Identifier Type" - - assert viewer_response_data[0]["name"] == "Name" - assert viewer_response_data[0]["award_number"] == "award number" - assert viewer_response_data[0]["award_title"] == "Award Title" - assert viewer_response_data[0]["award_uri"] == "Award URI" - assert viewer_response_data[0]["identifier"] == "Identifier" - assert viewer_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert viewer_response_data[0]["identifier_type"] == "Identifier Type" - assert viewer_response_data[1]["name"] == "Admin Name" - assert viewer_response_data[1]["award_number"] == "award number" - assert viewer_response_data[1]["award_title"] == "Award Title" - assert viewer_response_data[1]["award_uri"] == "Award URI" - assert viewer_response_data[1]["identifier"] == "Identifier" - assert viewer_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert viewer_response_data[1]["identifier_type"] == "Identifier Type" - assert viewer_response_data[2]["name"] == "Editor Name" - assert viewer_response_data[2]["award_number"] == "award number" - assert viewer_response_data[2]["award_title"] == "Award Title" - assert viewer_response_data[2]["award_uri"] == "Award URI" - assert viewer_response_data[2]["identifier"] == "Identifier" - assert viewer_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme URI" - assert viewer_response_data[2]["identifier_type"] == "Identifier Type" + assert response_data[0]["identifier"] == "identifier test" + assert response_data[0]["type"] == "ARK" + assert response_data[1]["identifier"] == "admin test" + assert 
response_data[1]["type"] == "ARK" + assert response_data[2]["identifier"] == "editor test" + assert response_data[2]["type"] == "ARK" + + assert admin_response_data[0]["identifier"] == "identifier test" + assert admin_response_data[0]["type"] == "ARK" + assert admin_response_data[1]["identifier"] == "admin test" + assert admin_response_data[1]["type"] == "ARK" + assert admin_response_data[2]["identifier"] == "editor test" + assert admin_response_data[2]["type"] == "ARK" + + assert editor_response_data[0]["identifier"] == "identifier test" + assert editor_response_data[0]["type"] == "ARK" + assert editor_response_data[1]["identifier"] == "admin test" + assert editor_response_data[1]["type"] == "ARK" + assert editor_response_data[2]["identifier"] == "editor test" + assert editor_response_data[2]["type"] == "ARK" + + assert viewer_response_data[0]["identifier"] == "identifier test" + assert viewer_response_data[0]["type"] == "ARK" + assert viewer_response_data[1]["identifier"] == "admin test" + assert viewer_response_data[1]["type"] == "ARK" + assert viewer_response_data[2]["identifier"] == "editor test" + assert viewer_response_data[2]["type"] == "ARK" -def test_delete_dataset_funder_metadata(clients): +def test_delete_alternative_identifier(clients): """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/funder' + Given a Flask application configured for testing and a study ID + When the '/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier' endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - funder metadata content + Then check that the response is valid and deletes the dataset alternative identifier content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - funder_id = 
pytest.global_dataset_funder_id - a_funder_id = pytest.global_dataset_funder_id_admin - e_funder_id = pytest.global_dataset_funder_id_editor + identifier_id = pytest.global_alternative_identifier_id + admin_identifier_id = pytest.global_alternative_identifier_id_admin + editor_identifier_id = pytest.global_alternative_identifier_id_editor + # verify Viewer cannot delete viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" ) response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{funder_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{identifier_id}" ) + # pylint: disable=line-too-long admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{a_funder_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{admin_identifier_id}" ) + # pylint: disable=line-too-long editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder/{e_funder_id}" + f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier/{editor_identifier_id}" ) assert viewer_response.status_code == 403 @@ -2694,1298 +2983,722 @@ def test_delete_dataset_funder_metadata(clients): assert editor_response.status_code == 204 -# ------------------- OTHER METADATA ------------------- # -def test_put_other_dataset_metadata(clients): +# ------------------- DATASET HEALTHSHEET MOTIVATION METADATA ------------------- # +def test_put_healthsheet_motivation_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID When the '/study/{study_id}/dataset/{dataset_id}' endpoint is requested (PUT) Then check that the response is valid and updates the dataset - other metadata content + healthsheet metadata content """ _logged_in_client, 
_admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, ) - assert response.status_code == 200 response_data = json.loads(response.data) - - assert response_data["acknowledgement"] == "Yes" - assert response_data["language"] == "English" - - assert response_data["size"] == ["Size"] - assert response_data["format"] == ["Format"] - assert response_data["standards_followed"] == "Standards Followed" + assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, ) - assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["acknowledgement"] == "Yes" - assert admin_response_data["language"] == "English" - assert admin_response_data["size"] == ["Size"] - assert admin_response_data["format"] == ["Format"] - assert admin_response_data["standards_followed"] == "Standards Followed" + assert ( + admin_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) editor_response = _editor_client.put( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, ) - assert editor_response.status_code == 200 - editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["acknowledgement"] == "Yes" - assert editor_response_data["language"] == "English" - assert editor_response_data["size"] == ["Size"] - assert editor_response_data["format"] == ["Format"] - assert editor_response_data["standards_followed"] == "Standards Followed" - - viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other", - json={ - "acknowledgement": "Yes", - "language": "English", - "resource_type": "Resource Type", - "size": ["Size"], - "format": ["Format"], - "standards_followed": "Standards Followed", - }, + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation", + json={"motivation": '[{"id":1,"question":"For","response":"new"}]'}, ) assert viewer_response.status_code == 403 -def test_get_other_dataset_metadata(clients): +def test_get_dataset_healthsheet_motivation_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - other metadata content + description metadata content """ _logged_in_client, _admin_client, _editor_client, 
_viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["motivation"] == '[{"id":1,"question":"For","response":"new"}]' + admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/other" + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert response.status_code == 200 - assert admin_response.status_code == 200 + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) assert editor_response.status_code == 200 - # assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - # Editor was the last to update the metadata successfully so - # the response should reflect that - assert response_data["acknowledgement"] == "Yes" - assert response_data["language"] == "English" - # assert response_data["resource_type"] == "Editor Resource Type" - assert response_data["size"] == ["Size"] - assert response_data["format"] == ["Format"] - assert response_data["standards_followed"] == "Standards Followed" - - assert admin_response_data["acknowledgement"] == "Yes" - 
assert admin_response_data["language"] == "English" - # assert admin_response_data["resource_type"] == "Editor Resource Type" - assert admin_response_data["size"] == ["Size"] - assert admin_response_data["format"] == ["Format"] - assert admin_response_data["standards_followed"] == "Standards Followed" + assert ( + editor_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) - assert editor_response_data["acknowledgement"] == "Yes" - assert editor_response_data["language"] == "English" - # assert editor_response_data["resource_type"] == "Editor Resource Type" - assert editor_response_data["size"] == ["Size"] - assert editor_response_data["format"] == ["Format"] - assert editor_response_data["standards_followed"] == "Standards Followed" + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/motivation" + ) + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["motivation"] + == '[{"id":1,"question":"For","response":"new"}]' + ) - assert viewer_response_data["acknowledgement"] == "Yes" - assert viewer_response_data["language"] == "English" - assert viewer_response_data["size"] == ["Size"] - assert viewer_response_data["format"] == ["Format"] - assert viewer_response_data["standards_followed"] == "Standards Followed" + # Editor was the last successful PUT request, so the response data should match -# ------------------- DATASET MANAGING ORGANIZATION METADATA ------------------- # -def test_put_dataset_managing_organization_metadata(clients): +# # ------------------- DATASET HEALTHSHEET COMPOSITION METADATA ------------------- # +def test_put_healthsheet_composition_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' + When the '/study/{study_id}/dataset/healthsheet/composition' endpoint is requested (PUT) Then 
check that the response is valid and updates the dataset - managing organization metadata content + healthsheet composition metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, ) - assert response.status_code == 200 response_data = json.loads(response.data) - - assert response_data["name"] == "Managing Organization Name" - assert response_data["identifier"] == "identifier" - assert response_data["identifier_scheme"] == "identifier scheme" - assert response_data["identifier_scheme_uri"] == "identifier scheme_uri" + assert ( + response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' + ) admin_response = _admin_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "admin Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, ) - assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - - assert admin_response_data["name"] == "admin Managing Organization Name" - assert admin_response_data["identifier"] == "identifier" - assert admin_response_data["identifier_scheme"] == "identifier scheme" - assert admin_response_data["identifier_scheme_uri"] == "identifier 
scheme_uri" + assert ( + admin_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) editor_response = _editor_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "editor Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, ) - assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - - assert editor_response_data["name"] == "editor Managing Organization Name" - assert editor_response_data["identifier"] == "identifier" - assert editor_response_data["identifier_scheme"] == "identifier scheme" - assert editor_response_data["identifier_scheme_uri"] == "identifier scheme_uri" + assert ( + editor_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) viewer_response = _viewer_client.put( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization", - json={ - "name": "editor Managing Organization Name", - "identifier": "identifier", - "identifier_scheme": "identifier scheme", - "identifier_scheme_uri": "identifier scheme_uri", - }, + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition", + json={"composition": '[{"id":1,"question":"For","response":"new"}]'}, ) - assert viewer_response.status_code == 403 -def test_get_dataset_managing_organization_metadata(clients): +def test_get_dataset_healthsheet_composition_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/composition' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - 
managing-organization metadata content + description metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["composition"] == '[{"id":1,"question":"For","response":"new"}]' + ) + admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' ) + editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/managing-organization" + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/composition" + ) assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) + assert ( + 
viewer_response_data["composition"] + == '[{"id":1,"question":"For","response":"new"}]' + ) - # Editor was the last to update the metadata successfully so - # the response should reflect that - assert response_data["name"] == "editor Managing Organization Name" - assert response_data["identifier"] == "identifier" - assert response_data["identifier_scheme"] == "identifier scheme" - assert response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - assert admin_response_data["name"] == "editor Managing Organization Name" - assert admin_response_data["identifier"] == "identifier" - assert admin_response_data["identifier_scheme"] == "identifier scheme" - assert admin_response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - assert editor_response_data["name"] == "editor Managing Organization Name" - assert editor_response_data["identifier"] == "identifier" - assert editor_response_data["identifier_scheme"] == "identifier scheme" - assert editor_response_data["identifier_scheme_uri"] == "identifier scheme_uri" - - assert viewer_response_data["name"] == "editor Managing Organization Name" - assert viewer_response_data["identifier"] == "identifier" - assert viewer_response_data["identifier_scheme"] == "identifier scheme" - assert viewer_response_data["identifier_scheme_uri"] == "identifier scheme_uri" + # Editor was the last successful PUT request, so the response data should match -# ------------------- RELATED IDENTIFIER METADATA ------------------- # -def test_post_dataset_related_identifier_metadata(clients): +# ------------------- DATASET HEALTHSHEET COLLECTION METADATA ------------------- # +def test_put_healthsheet_collection_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - related identifier metadata content 
+ When the '/study/{study_id}/dataset/healthsheet/collection' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet collection metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - "scheme_type": "test", - "resource_type": "test", - } - ], + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 + assert response.status_code == 200 response_data = json.loads(response.data) + assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' - pytest.global_dataset_related_identifier_id = response_data[0]["id"] - - assert response_data[0]["identifier"] == "test identifier" - assert response_data[0]["identifier_type"] == "test identifier type" - assert response_data[0]["relation_type"] == "test relation type" - assert response_data[0]["related_metadata_scheme"] == "test" - assert response_data[0]["scheme_uri"] == "test" - assert response_data[0]["scheme_type"] == "test" - assert response_data[0]["resource_type"] == "test" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "admin test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - 
"scheme_type": "test", - "resource_type": "test", - } - ], - ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 - admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_related_identifier_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == "admin test identifier" - assert admin_response_data[0]["identifier_type"] == "test identifier type" - assert admin_response_data[0]["relation_type"] == "test relation type" - assert admin_response_data[0]["related_metadata_scheme"] == "test" - assert admin_response_data[0]["scheme_uri"] == "test" - assert admin_response_data[0]["scheme_type"] == "test" - assert admin_response_data[0]["resource_type"] == "test" - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "editor test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - "scheme_type": "test", - "resource_type": "test", - } - ], + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert editor_response.status_code == 201 + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_related_identifier_id_editor = editor_response_data[0]["id"] - - assert 
editor_response_data[0]["identifier"] == "editor test identifier" - assert editor_response_data[0]["identifier_type"] == "test identifier type" - assert editor_response_data[0]["relation_type"] == "test relation type" - assert editor_response_data[0]["related_metadata_scheme"] == "test" - assert editor_response_data[0]["scheme_uri"] == "test" - assert editor_response_data[0]["scheme_type"] == "test" - assert editor_response_data[0]["resource_type"] == "test" - viewer_client = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier", - json=[ - { - "identifier": "viewer test identifier", - "identifier_type": "test identifier type", - "relation_type": "test relation type", - "related_metadata_scheme": "test", - "scheme_uri": "test", - "scheme_type": "test", - "resource_type": "test", - } - ], + assert ( + editor_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert viewer_client.status_code == 403 + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection", + json={"collection": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 -def test_get_dataset_related_identifier_metadata(clients): +def test_get_dataset_healthsheet_collection_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/collection' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - related identifier metadata content + description metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + 
f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["collection"] == '[{"id":1,"question":"For","response":"new"}]' + admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' ) + editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier" + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/collection" + ) assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["collection"] + == '[{"id":1,"question":"For","response":"new"}]' + ) - # seach for main title and subtitle index in response_data[n]["titles"] - # pylint: disable=line-too-long + # Editor was the last successful PUT request, so the response data should match - # assert len(response_data) == 3 - # assert len(admin_response_data) == 3 - # assert 
len(editor_response_data) == 3 - # assert len(viewer_response_data) == 3 - assert response_data[0]["identifier"] == "test identifier" - assert response_data[0]["identifier_type"] == "test identifier type" - assert response_data[0]["relation_type"] == "test relation type" - assert response_data[0]["related_metadata_scheme"] == "test" - assert response_data[0]["scheme_uri"] == "test" - assert response_data[0]["scheme_type"] == "test" - assert response_data[0]["resource_type"] == "test" - assert response_data[1]["identifier"] == "admin test identifier" - assert response_data[1]["identifier_type"] == "test identifier type" - assert response_data[1]["relation_type"] == "test relation type" - assert response_data[1]["related_metadata_scheme"] == "test" - assert response_data[1]["scheme_uri"] == "test" - assert response_data[1]["scheme_type"] == "test" - assert response_data[1]["resource_type"] == "test" - assert response_data[2]["identifier"] == "editor test identifier" - assert response_data[2]["identifier_type"] == "test identifier type" - assert response_data[2]["relation_type"] == "test relation type" - assert response_data[2]["related_metadata_scheme"] == "test" - assert response_data[2]["scheme_uri"] == "test" - assert response_data[2]["scheme_type"] == "test" - assert response_data[2]["resource_type"] == "test" - assert admin_response_data[0]["identifier"] == "test identifier" - assert admin_response_data[0]["identifier_type"] == "test identifier type" - assert admin_response_data[0]["relation_type"] == "test relation type" - assert admin_response_data[0]["related_metadata_scheme"] == "test" - assert admin_response_data[0]["scheme_uri"] == "test" - assert admin_response_data[0]["scheme_type"] == "test" - assert admin_response_data[0]["resource_type"] == "test" - assert admin_response_data[1]["identifier"] == "admin test identifier" - assert admin_response_data[1]["identifier_type"] == "test identifier type" - assert admin_response_data[1]["relation_type"] == "test 
relation type" - assert admin_response_data[1]["related_metadata_scheme"] == "test" - assert admin_response_data[1]["scheme_uri"] == "test" - assert admin_response_data[1]["scheme_type"] == "test" - assert admin_response_data[1]["resource_type"] == "test" - assert admin_response_data[2]["identifier"] == "editor test identifier" - assert admin_response_data[2]["identifier_type"] == "test identifier type" - assert admin_response_data[2]["relation_type"] == "test relation type" - assert admin_response_data[2]["related_metadata_scheme"] == "test" - assert admin_response_data[2]["scheme_uri"] == "test" - assert admin_response_data[2]["scheme_type"] == "test" - assert admin_response_data[2]["resource_type"] == "test" +# ------------------- DATASET HEALTHSHEET PREPROCESSING METADATA ------------------- # +def test_put_healthsheet_preprocessing_dataset_metadata(clients): + """ + Given a Flask application configured for testing and a study ID and dataset ID + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet preprocessing metadata content + """ + _logged_in_client, _admin_client, _editor_client, _viewer_client = clients + study_id = pytest.global_study_id["id"] # type: ignore + dataset_id = pytest.global_dataset_id - assert editor_response_data[0]["identifier"] == "test identifier" - assert editor_response_data[0]["identifier_type"] == "test identifier type" - assert editor_response_data[0]["relation_type"] == "test relation type" - assert editor_response_data[0]["related_metadata_scheme"] == "test" - assert editor_response_data[0]["scheme_uri"] == "test" - assert editor_response_data[0]["scheme_type"] == "test" - assert editor_response_data[0]["resource_type"] == "test" - assert editor_response_data[1]["identifier"] == "admin test identifier" - assert editor_response_data[1]["identifier_type"] == "test identifier type" - assert 
editor_response_data[1]["relation_type"] == "test relation type" - assert editor_response_data[1]["related_metadata_scheme"] == "test" - assert editor_response_data[1]["scheme_uri"] == "test" - assert editor_response_data[1]["scheme_type"] == "test" - assert editor_response_data[1]["resource_type"] == "test" - assert editor_response_data[2]["identifier"] == "editor test identifier" - assert editor_response_data[2]["identifier_type"] == "test identifier type" - assert editor_response_data[2]["relation_type"] == "test relation type" - assert editor_response_data[2]["related_metadata_scheme"] == "test" - assert editor_response_data[2]["scheme_uri"] == "test" - assert editor_response_data[2]["scheme_type"] == "test" - assert editor_response_data[2]["resource_type"] == "test" + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' + ) - assert viewer_response_data[0]["identifier"] == "test identifier" - assert viewer_response_data[0]["identifier_type"] == "test identifier type" - assert viewer_response_data[0]["relation_type"] == "test relation type" - assert viewer_response_data[0]["related_metadata_scheme"] == "test" - assert viewer_response_data[0]["scheme_uri"] == "test" - assert viewer_response_data[0]["scheme_type"] == "test" - assert viewer_response_data[0]["resource_type"] == "test" - assert viewer_response_data[1]["identifier"] == "admin test identifier" - assert viewer_response_data[1]["identifier_type"] == "test identifier type" - assert viewer_response_data[1]["relation_type"] == "test relation type" - assert viewer_response_data[1]["related_metadata_scheme"] == "test" - assert viewer_response_data[1]["scheme_uri"] == "test" - assert 
viewer_response_data[1]["scheme_type"] == "test" - assert viewer_response_data[1]["resource_type"] == "test" - assert viewer_response_data[2]["identifier"] == "editor test identifier" - assert viewer_response_data[2]["identifier_type"] == "test identifier type" - assert viewer_response_data[2]["relation_type"] == "test relation type" - assert viewer_response_data[2]["related_metadata_scheme"] == "test" - assert viewer_response_data[2]["scheme_uri"] == "test" - assert viewer_response_data[2]["scheme_type"] == "test" - assert viewer_response_data[2]["resource_type"] == "test" + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) -def test_delete_dataset_related_identifier_metadata(clients): + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing", + json={"preprocessing": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert viewer_response.status_code == 403 + + +def test_get_dataset_healthsheet_preprocessing_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}' - endpoint is requested (DELETE) + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing' + endpoint is 
requested (GET) Then check that the response is valid and retrieves the dataset - related identifier metadata content + description metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - identifier_id = pytest.global_dataset_related_identifier_id - a_identifier_id = pytest.global_dataset_related_identifier_id_admin - e_identifier_id = pytest.global_dataset_related_identifier_id_editor + response = _logged_in_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["preprocessing"] == '[{"id":1,"question":"For","response":"new"}]' + ) + + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" + ) + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' + ) - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{identifier_id}" + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{a_identifier_id}" + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/preprocessing" ) - editor_response = _editor_client.delete( - 
f"/study/{study_id}/dataset/{dataset_id}/metadata/related-identifier/{e_identifier_id}" + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["preprocessing"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + # Editor was the last successful PUT request, so the response data should match -# # ------------------- RIGHTS METADATA ------------------- # -def test_post_dataset_rights_metadata(clients): +# # ------------------- DATASET HEALTHSHEET USES METADATA ------------------- # +def test_put_healthsheet_uses_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - rights metadata content + When the '/study/{study_id}/dataset/healthsheet/uses' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet uses metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 + assert response.status_code == 
200 response_data = json.loads(response.data) - pytest.global_dataset_rights_id = response_data[0]["id"] - - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[0]["rights"] == "Rights" - assert response_data[0]["uri"] == "URI" - assert response_data[0]["license_text"] == "license text" + assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Admin Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert admin_response.status_code == 201 + assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_rights_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["identifier"] == "Admin Identifier" - assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[0]["rights"] == "Rights" - assert admin_response_data[0]["uri"] == "URI" - assert admin_response_data[0]["license_text"] == "license text" + assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Editor Identifier", - "identifier_scheme": "Identifier Scheme", - 
"identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, ) - - assert editor_response.status_code == 201 + assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_rights_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["identifier"] == "Editor Identifier" - assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[0]["rights"] == "Rights" - assert editor_response_data[0]["uri"] == "URI" - assert editor_response_data[0]["license_text"] == "license text" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Viewer Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], + assert ( + editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' ) + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses", + json={"uses": '[{"id":1,"question":"For","response":"new"}]'}, + ) assert viewer_response.status_code == 403 -def test_get_dataset_rights_metadata(clients): +def test_get_dataset_healthsheet_uses_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study' + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/uses' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - rights metadata content + description metadata content """ _logged_in_client, 
_admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" ) + assert response.status_code == 200 + response_data = json.loads(response.data) + assert response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' + admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" ) - - assert response.status_code == 200 assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - - response_data = json.loads(response.data) admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert response_data[0]["identifier"] == "Identifier" - assert response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[0]["rights"] == "Rights" - assert response_data[0]["uri"] == "URI" - assert response_data[0]["license_text"] == "license text" - - assert admin_response_data[0]["identifier"] == "Identifier" - assert admin_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[0]["rights"] == "Rights" - assert admin_response_data[0]["uri"] == "URI" - assert admin_response_data[0]["license_text"] == "license text" - - assert 
editor_response_data[0]["identifier"] == "Identifier" - assert editor_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[0]["rights"] == "Rights" - assert editor_response_data[0]["uri"] == "URI" - assert editor_response_data[0]["license_text"] == "license text" - - assert response_data[1]["identifier"] == "Admin Identifier" - assert response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[1]["rights"] == "Rights" - assert response_data[1]["uri"] == "URI" - assert response_data[1]["license_text"] == "license text" - - assert admin_response_data[1]["identifier"] == "Admin Identifier" - assert admin_response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[1]["rights"] == "Rights" - assert admin_response_data[1]["uri"] == "URI" - assert admin_response_data[1]["license_text"] == "license text" - - assert editor_response_data[1]["identifier"] == "Admin Identifier" - assert editor_response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[1]["rights"] == "Rights" - assert editor_response_data[1]["uri"] == "URI" - assert editor_response_data[1]["license_text"] == "license text" - - assert response_data[2]["identifier"] == "Editor Identifier" - assert response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert response_data[2]["rights"] == "Rights" - assert response_data[2]["uri"] == "URI" - assert response_data[2]["license_text"] == "license text" - - assert admin_response_data[2]["identifier"] == "Editor Identifier" - assert 
admin_response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert admin_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert admin_response_data[2]["rights"] == "Rights" - assert admin_response_data[2]["uri"] == "URI" - assert admin_response_data[2]["license_text"] == "license text" - - assert editor_response_data[2]["identifier"] == "Editor Identifier" - assert editor_response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert editor_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert editor_response_data[2]["rights"] == "Rights" - assert editor_response_data[2]["uri"] == "URI" - assert editor_response_data[2]["license_text"] == "license text" - - assert viewer_response_data[0]["identifier"] == "Identifier" - assert viewer_response_data[0]["identifier_scheme"] == "Identifier Scheme" - assert viewer_response_data[0]["identifier_scheme_uri"] == "Identifier Scheme" - assert viewer_response_data[0]["rights"] == "Rights" - assert viewer_response_data[0]["uri"] == "URI" - assert viewer_response_data[0]["license_text"] == "license text" - - assert viewer_response_data[1]["identifier"] == "Admin Identifier" - assert viewer_response_data[1]["identifier_scheme"] == "Identifier Scheme" - assert viewer_response_data[1]["identifier_scheme_uri"] == "Identifier Scheme" - assert viewer_response_data[1]["rights"] == "Rights" - assert viewer_response_data[1]["uri"] == "URI" - assert viewer_response_data[1]["license_text"] == "license text" - - assert viewer_response_data[2]["identifier"] == "Editor Identifier" - assert viewer_response_data[2]["identifier_scheme"] == "Identifier Scheme" - assert viewer_response_data[2]["identifier_scheme_uri"] == "Identifier Scheme" - assert viewer_response_data[2]["rights"] == "Rights" - assert viewer_response_data[2]["uri"] == "URI" - assert viewer_response_data[2]["license_text"] == "license text" - - -def test_delete_dataset_rights_metadata(clients): - """ - Given a Flask application 
configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/rights' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - rights metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - rights_id = pytest.global_dataset_rights_id - a_rights_id = pytest.global_dataset_rights_id_admin - e_rights_id = pytest.global_dataset_rights_id_editor + assert admin_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{rights_id}" + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{a_rights_id}" + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/uses" ) - editor_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights/{e_rights_id}" + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["uses"] == '[{"id":1,"question":"For","response":"new"}]' ) - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + # Editor was the last successful PUT request, so the response data should match -# ------------------- 
SUBJECTS METADATA ------------------- # -def test_post_dataset_subjects_metadata(clients): +# ------------------- DATASET HEALTHSHEET DISTRIBUTION METADATA ------------------- # +def test_put_healthsheet_distribution_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - subjects metadata content + When the '/study/{study_id}/dataset/healthsheet/distribution' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet distribution metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Value URI", - } - ], + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 + assert response.status_code == 200 response_data = json.loads(response.data) - pytest.global_dataset_subject_id = response_data[0]["id"] - - assert response_data[0]["scheme"] == "Scheme" - assert response_data[0]["scheme_uri"] == "Scheme URI" - assert response_data[0]["subject"] == "Subject" - assert response_data[0]["value_uri"] == "Value URI" - assert response_data[0]["classification_code"] == "Classification Code" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - 
"classification_code": "Classification Code", - "scheme": "Admin Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Admin Value URI", - } - ], + assert ( + response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - assert admin_response.status_code == 201 + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_subject_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["scheme"] == "Admin Scheme" - assert admin_response_data[0]["scheme_uri"] == "Scheme URI" - assert admin_response_data[0]["subject"] == "Subject" - assert admin_response_data[0]["value_uri"] == "Admin Value URI" - assert admin_response_data[0]["classification_code"] == "Classification Code" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Editor Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Editor Value URI", - } - ], + assert ( + admin_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert editor_response.status_code == 201 + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_subject_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["scheme"] == "Editor Scheme" - assert editor_response_data[0]["scheme_uri"] == "Scheme URI" - assert 
editor_response_data[0]["subject"] == "Subject" - assert editor_response_data[0]["value_uri"] == "Editor Value URI" - assert editor_response_data[0]["classification_code"] == "Classification Code" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Viewer Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Viewer Value URI", - } - ], + assert ( + editor_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' ) + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution", + json={"distribution": '[{"id":1,"question":"For","response":"new"}]'}, + ) assert viewer_response.status_code == 403 -def test_get_dataset_subjects_metadata(clients): +def test_get_dataset_healthsheet_distribution_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - subjects metadata content + distribution metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" ) - assert response.status_code == 200 + response_data = json.loads(response.data) + assert ( + response_data["distribution"] == '[{"id":1,"question":"For","response":"new"}]' + ) + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" + ) + assert admin_response.status_code == 200 + 
admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' + ) -def test_delete_dataset_subject_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/subject' - endpoint is requested (GET) - Then check that the response is valid and retrieves the dataset - subjects metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - subject_id = pytest.global_dataset_subject_id - admin_sub_id = pytest.global_dataset_subject_id_admin - editor_sub_id = pytest.global_dataset_subject_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{subject_id}" + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{admin_sub_id}" + + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/distribution" ) - editor_response = _editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject/{editor_sub_id}" + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["distribution"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert viewer_response.status_code == 403 - assert 
response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + # Editor was the last successful PUT request, so the response data should match -# ------------------- TITLE METADATA ------------------- # -def test_post_dataset_title_metadata(clients): +# ------------------- DATASET HEALTHSHEET MAINTENANCE METADATA ------------------- # +def test_put_healthsheet_maintenance_dataset_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' - endpoint is requested (POST) - Then check that the response is valid and creates the dataset - title metadata content + When the '/study/{study_id}/dataset/healthsheet/maintenance' + endpoint is requested (PUT) + Then check that the response is valid and updates the dataset + healthsheet maintenance metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id - response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Owner Title", "type": "Subtitle"}], + response = _logged_in_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - - assert response.status_code == 201 + assert response.status_code == 200 response_data = json.loads(response.data) - pytest.global_dataset_title_id = response_data[0]["id"] - - assert response_data[0]["title"] == "Owner Title" - assert response_data[0]["type"] == "Subtitle" - - admin_response = _admin_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Admin Title", "type": "Subtitle"}], + assert ( + response_data["maintenance"] == 
'[{"id":1,"question":"For","response":"new"}]' ) - # Add a one second delay to prevent duplicate timestamps - sleep(1) - assert admin_response.status_code == 201 + admin_response = _admin_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert admin_response.status_code == 200 admin_response_data = json.loads(admin_response.data) - pytest.global_dataset_title_id_admin = admin_response_data[0]["id"] - - assert admin_response_data[0]["title"] == "Admin Title" - assert admin_response_data[0]["type"] == "Subtitle" - - editor_response = _editor_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Editor Title", "type": "Subtitle"}], + assert ( + admin_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert editor_response.status_code == 201 + editor_response = _editor_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) + assert editor_response.status_code == 200 editor_response_data = json.loads(editor_response.data) - pytest.global_dataset_title_id_editor = editor_response_data[0]["id"] - - assert editor_response_data[0]["title"] == "Editor Title" - assert editor_response_data[0]["type"] == "Subtitle" - - viewer_response = _viewer_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title", - json=[{"title": "Viewer Title", "type": "Subtitle"}], + assert ( + editor_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' ) + viewer_response = _viewer_client.put( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance", + json={"maintenance": '[{"id":1,"question":"For","response":"new"}]'}, + ) assert viewer_response.status_code == 403 -def test_get_dataset_title_metadata(clients): +def 
test_get_dataset_healthsheet_maintenance_metadata(clients): """ Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/title' + When the '/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance' endpoint is requested (GET) Then check that the response is valid and retrieves the dataset - title metadata content + maintenance metadata content """ _logged_in_client, _admin_client, _editor_client, _viewer_client = clients study_id = pytest.global_study_id["id"] # type: ignore dataset_id = pytest.global_dataset_id response = _logged_in_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" - ) - admin_response = _admin_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" - ) - editor_response = _editor_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" - ) - viewer_response = _viewer_client.get( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title" + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" ) - assert response.status_code == 200 - assert admin_response.status_code == 200 - assert editor_response.status_code == 200 - assert viewer_response.status_code == 200 - response_data = json.loads(response.data) - admin_response_data = json.loads(admin_response.data) - editor_response_data = json.loads(editor_response.data) - viewer_response_data = json.loads(viewer_response.data) - - assert len(response_data) == 4 - assert len(admin_response_data) == 4 - assert len(editor_response_data) == 4 - assert len(viewer_response_data) == 4 - - # search for maintitle index - # pylint: disable=line-too-long - main_title = next( - (index for (index, d) in enumerate(response_data) if d["type"] == "MainTitle"), - None, - ) - a_main_title = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["type"] == "MainTitle" - ), - None, - ) - e_main_title = next( - ( - index - for (index, d) in 
enumerate(editor_response_data) - if d["type"] == "MainTitle" - ), - None, - ) - v_main_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["type"] == "MainTitle" - ), - None, - ) - # search for admin title index - admin_title = next( - ( - index - for (index, d) in enumerate(response_data) - if d["title"] == "Admin Title" - ), - None, - ) - a_admin_title = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["title"] == "Admin Title" - ), - None, - ) - e_admin_title = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["title"] == "Admin Title" - ), - None, - ) - v_admin_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["title"] == "Admin Title" - ), - None, + assert ( + response_data["maintenance"] == '[{"id":1,"question":"For","response":"new"}]' ) - # search for editor title index - editor_title = next( - ( - index - for (index, d) in enumerate(response_data) - if d["title"] == "Editor Title" - ), - None, - ) - a_editor_title = next( - ( - index - for (index, d) in enumerate(admin_response_data) - if d["title"] == "Editor Title" - ), - None, - ) - e_editor_title = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["title"] == "Editor Title" - ), - None, + admin_response = _admin_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" ) - v_editor_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["title"] == "Editor Title" - ), - None, + assert admin_response.status_code == 200 + admin_response_data = json.loads(admin_response.data) + assert ( + admin_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' ) - # search for owner title index - own_title = next( - ( - index - for (index, d) in enumerate(response_data) - if d["title"] == "Owner Title" - ), - None, - ) - a_own_title = next( - ( - index - for (index, d) in 
enumerate(admin_response_data) - if d["title"] == "Owner Title" - ), - None, - ) - e_own_title = next( - ( - index - for (index, d) in enumerate(editor_response_data) - if d["title"] == "Owner Title" - ), - None, + editor_response = _editor_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" ) - v_own_title = next( - ( - index - for (index, d) in enumerate(viewer_response_data) - if d["title"] == "Owner Title" - ), - None, + assert editor_response.status_code == 200 + editor_response_data = json.loads(editor_response.data) + assert ( + editor_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert response_data[main_title]["title"] == "Dataset Title" - assert response_data[main_title]["type"] == "MainTitle" - assert response_data[own_title]["title"] == "Owner Title" - assert response_data[own_title]["type"] == "Subtitle" - assert response_data[admin_title]["title"] == "Admin Title" - assert response_data[admin_title]["type"] == "Subtitle" - assert response_data[editor_title]["title"] == "Editor Title" - assert response_data[editor_title]["type"] == "Subtitle" - - assert admin_response_data[a_main_title]["title"] == "Dataset Title" - assert admin_response_data[a_main_title]["type"] == "MainTitle" - assert admin_response_data[a_own_title]["title"] == "Owner Title" - assert admin_response_data[a_own_title]["type"] == "Subtitle" - assert admin_response_data[a_admin_title]["title"] == "Admin Title" - assert admin_response_data[a_admin_title]["type"] == "Subtitle" - assert admin_response_data[a_editor_title]["title"] == "Editor Title" - assert admin_response_data[a_editor_title]["type"] == "Subtitle" - - assert editor_response_data[e_main_title]["title"] == "Dataset Title" - assert editor_response_data[e_main_title]["type"] == "MainTitle" - assert editor_response_data[e_own_title]["title"] == "Owner Title" - assert editor_response_data[e_own_title]["type"] == "Subtitle" - assert 
editor_response_data[e_admin_title]["title"] == "Admin Title" - assert editor_response_data[e_admin_title]["type"] == "Subtitle" - assert editor_response_data[e_editor_title]["title"] == "Editor Title" - assert editor_response_data[e_editor_title]["type"] == "Subtitle" - - assert viewer_response_data[v_main_title]["title"] == "Dataset Title" - assert viewer_response_data[v_main_title]["type"] == "MainTitle" - assert viewer_response_data[v_own_title]["title"] == "Owner Title" - assert viewer_response_data[v_own_title]["type"] == "Subtitle" - assert viewer_response_data[v_admin_title]["title"] == "Admin Title" - assert viewer_response_data[v_admin_title]["type"] == "Subtitle" - assert viewer_response_data[v_editor_title]["title"] == "Editor Title" - assert viewer_response_data[v_editor_title]["type"] == "Subtitle" - - -def test_delete_dataset_title_metadata(clients): - """ - Given a Flask application configured for testing and a study ID and dataset ID - When the '/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}' - endpoint is requested (DELETE) - Then check that the response is valid and deletes the dataset - title metadata content - """ - _logged_in_client, _admin_client, _editor_client, _viewer_client = clients - study_id = pytest.global_study_id["id"] # type: ignore - dataset_id = pytest.global_dataset_id - title_id = pytest.global_dataset_title_id - admin_title_id = pytest.global_dataset_title_id_admin - editor_title_id = pytest.global_dataset_title_id_editor - - viewer_response = _viewer_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" - ) - response = _logged_in_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{title_id}" - ) - admin_response = _admin_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{admin_title_id}" + viewer_response = _viewer_client.get( + f"/study/{study_id}/dataset/{dataset_id}/healthsheet/maintenance" ) - editor_response = 
_editor_client.delete( - f"/study/{study_id}/dataset/{dataset_id}/metadata/title/{editor_title_id}" + assert viewer_response.status_code == 200 + viewer_response_data = json.loads(viewer_response.data) + assert ( + viewer_response_data["maintenance"] + == '[{"id":1,"question":"For","response":"new"}]' ) - assert viewer_response.status_code == 403 - assert response.status_code == 204 - assert admin_response.status_code == 204 - assert editor_response.status_code == 204 + # Editor was the last successful PUT request, so the response data should match diff --git a/tests/functional/test_060_study_version_api.py b/tests/functional/test_060_study_version_api.py index 2ceee352..dcc0396f 100644 --- a/tests/functional/test_060_study_version_api.py +++ b/tests/functional/test_060_study_version_api.py @@ -696,91 +696,125 @@ def test_get_version_dataset_metadata(clients): dataset_id = pytest.global_dataset_id # type: ignore version_id = pytest.global_dataset_version_id # type: ignore - contributor_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/contributor", - json=[ - { - "given_name": "Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "contributor_type": "Con Type", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } - ], - } - ], - ) - creator_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/creator", - json=[ - { - "given_name": "Given Name here", - "family_name": "Family Name here", - "name_type": "Personal", - "name_identifier": "Name identifier", - "name_identifier_scheme": "Name Scheme ID", - "name_identifier_scheme_uri": "Name ID Scheme URI", - "affiliations": [ - { - "name": "Test", - "identifier": "yes", - "scheme": "uh", - "scheme_uri": "scheme uri", - } 
- ], - } - ], - ) - date_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/date", - json=[{"date": 20210101, "type": "Type", "information": "Info"}], + team_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/team", + json={ + "creators": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "contributors": [ + { + "given_name": "Given Name here", + "family_name": "Family Name here", + "name_type": "Personal", + "name_identifier": "Name identifier", + "name_identifier_scheme": "Name Scheme ID", + "name_identifier_scheme_uri": "Name ID Scheme URI", + "contributor_type": "Con Type", + "affiliations": [ + { + "name": "Test", + "identifier": "yes", + "scheme": "uh", + "scheme_uri": "scheme uri", + } + ], + } + ], + "managing_organization": { + "name": "editor Managing Organization Name", + "identifier": "identifier", + "identifier_scheme": "identifier scheme", + "identifier_scheme_uri": "identifier scheme_uri", + }, + "funders": [ + { + "name": "Name", + "award_number": "award number", + "award_title": "Award Title", + "award_uri": "Award URI", + "identifier": "Identifier", + "identifier_scheme_uri": "Identifier Scheme URI", + "identifier_type": "Identifier Type", + } + ], + }, ) - funder_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/funder", - json=[ - { - "name": "Name", - "award_number": "award number", - "award_title": "Award Title", - "award_uri": "Award URI", - "identifier": "Identifier", - "identifier_scheme_uri": "Identifier Scheme URI", - "identifier_type": "Identifier Type", - } - ], + general_information_response = 
_logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/general-information", + json={ + "titles": [{"title": "Title", "type": "Subtitle"}], + "descriptions": [{"description": "Owner Description", "type": "Methods"}], + "dates": [{"date": 20210101, "type": "Accepted", "information": "Info"}], + }, ) - rights_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/rights", - json=[ - { - "identifier": "Identifier", - "identifier_scheme": "Identifier Scheme", - "identifier_scheme_uri": "Identifier Scheme", - "rights": "Rights", - "uri": "URI", - "license_text": "license text", - } - ], + access_rights_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/access-rights", + json={ + "access": { + "type": "editor type", + "description": "editor description", + "url": "google.com", + "url_last_checked": 123, + }, + "rights": [ + { + "identifier": "Identifier", + "identifier_scheme": "Identifier Scheme", + "identifier_scheme_uri": "Identifier Scheme", + "rights": "Rights", + "uri": "URI", + "license_text": "license text", + } + ], + }, ) - subject_response = _logged_in_client.post( - f"/study/{study_id}/dataset/{dataset_id}/metadata/subject", - json=[ - { - "classification_code": "Classification Code", - "scheme": "Scheme", - "scheme_uri": "Scheme URI", - "subject": "Subject", - "value_uri": "Value URI", - } - ], + dataset_data_management_response = _logged_in_client.post( + f"/study/{study_id}/dataset/{dataset_id}/metadata/data-management", + json={ + "consent": { + "type": "test", + "noncommercial": True, + "geog_restrict": True, + "research_type": True, + "genetic_only": True, + "no_methods": True, + "details": "test", + }, + "deident": { + "type": "Level", + "direct": True, + "hipaa": True, + "dates": True, + "nonarr": True, + "k_anon": True, + "details": "Details", + }, + "subjects": [ + { + "classification_code": "Classification Code", + "scheme": "Scheme", + "scheme_uri": "Scheme 
URI", + "subject": "Subject", + "value_uri": "Value URI", + } + ], + }, ) alt_identifier_response = _logged_in_client.post( f"/study/{study_id}/dataset/{dataset_id}/metadata/alternative-identifier", @@ -805,13 +839,11 @@ def test_get_version_dataset_metadata(clients): } ], ) - assert contributor_response.status_code == 201 - assert creator_response.status_code == 201 - assert date_response.status_code == 201 - assert funder_response.status_code == 201 - assert rights_response.status_code == 201 - assert subject_response.status_code == 201 + assert team_response.status_code == 200 + assert access_rights_response.status_code == 200 assert alt_identifier_response.status_code == 201 + assert general_information_response.status_code == 200 + assert dataset_data_management_response.status_code == 200 assert related_identifier_response.status_code == 201 response = _logged_in_client.get( @@ -844,7 +876,7 @@ def test_get_version_dataset_metadata(clients): assert response_data["contributors"][0]["contributor_type"] == "Con Type" assert response_data["dates"][0]["date"] == "01-01-1970" - assert response_data["dates"][0]["type"] == "Type" + assert response_data["dates"][0]["type"] == "Accepted" assert response_data["creators"][0]["last_name"] == "Family Name here" assert response_data["creators"][0]["first_name"] == "Given Name here" @@ -894,7 +926,7 @@ def test_get_version_dataset_metadata(clients): assert admin_response_data["contributors"][0]["name_type"] == "Personal" assert admin_response_data["contributors"][0]["contributor_type"] == "Con Type" assert admin_response_data["dates"][0]["date"] == "01-01-1970" - assert admin_response_data["dates"][0]["type"] == "Type" + assert admin_response_data["dates"][0]["type"] == "Accepted" assert admin_response_data["creators"][0]["first_name"] == "Given Name here" assert admin_response_data["creators"][0]["last_name"] == "Family Name here" assert admin_response_data["creators"][0]["name_type"] == "Personal" @@ -938,7 +970,7 @@ def 
test_get_version_dataset_metadata(clients): assert editor_response_data["contributors"][0]["name_type"] == "Personal" assert editor_response_data["contributors"][0]["contributor_type"] == "Con Type" assert editor_response_data["dates"][0]["date"] == "01-01-1970" - assert editor_response_data["dates"][0]["type"] == "Type" + assert editor_response_data["dates"][0]["type"] == "Accepted" assert editor_response_data["creators"][0]["first_name"] == "Given Name here" assert editor_response_data["creators"][0]["last_name"] == "Family Name here" assert editor_response_data["creators"][0]["name_type"] == "Personal" From 2983bb75d8b0e1dbeead57356fe504ed3319a9d3 Mon Sep 17 00:00:00 2001 From: Aydan <62059163+Aydawka@users.noreply.github.com> Date: Tue, 25 Mar 2025 13:36:42 -0700 Subject: [PATCH 488/505] =?UTF-8?q?fix:=20=20=F0=9F=90=9B=20study=20metada?= =?UTF-8?q?ta=20cleanup=20(#64)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: study metadata folders * fix: format * fix: enambe restx validation for study metadata * fix: study metadata restx validation --- apis/__init__.py | 10 - .../dataset_data_management.py | 2 +- apis/study_metadata/study_collaborators.py | 107 ------- apis/study_metadata/study_conditions.py | 91 ------ apis/study_metadata/study_description.py | 191 +++++++++--- apis/study_metadata/study_design.py | 1 + apis/study_metadata/study_eligibility.py | 1 + apis/study_metadata/study_identification.py | 121 -------- apis/study_metadata/study_intervention.py | 2 +- apis/study_metadata/study_keywords.py | 92 ------ apis/study_metadata/study_location.py | 1 + apis/study_metadata/study_other.py | 256 ---------------- apis/study_metadata/study_overall_official.py | 15 +- apis/study_metadata/study_oversight.py | 14 +- apis/study_metadata/study_status.py | 1 + apis/study_metadata/study_team.py | 290 ++++++++++-------- .../functional/test_050_study_metadata_api.py | 1 - 17 files changed, 330 insertions(+), 866 
deletions(-) delete mode 100644 apis/study_metadata/study_collaborators.py delete mode 100644 apis/study_metadata/study_conditions.py delete mode 100644 apis/study_metadata/study_identification.py delete mode 100644 apis/study_metadata/study_keywords.py delete mode 100644 apis/study_metadata/study_other.py diff --git a/apis/__init__.py b/apis/__init__.py index 8def85ac..71482d7d 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -23,16 +23,11 @@ from .study import api as study_api from .study_metadata.study_arm import api as arm from .study_metadata.study_central_contact import api as central_contact -from .study_metadata.study_collaborators import api as collaborators -from .study_metadata.study_conditions import api as conditions from .study_metadata.study_description import api as study_description from .study_metadata.study_design import api as design from .study_metadata.study_eligibility import api as eligibility -from .study_metadata.study_identification import api as identification from .study_metadata.study_intervention import api as intervention -# from .study_metadata.study_other import api as other -from .study_metadata.study_keywords import api as keywords from .study_metadata.study_location import api as location from .study_metadata.study_overall_official import api as overall_official from .study_metadata.study_oversight import api as oversight @@ -69,16 +64,11 @@ "eligibility", "intervention", "location", - # "other", - "keywords", - "conditions", "oversight", "overall_official", "sponsors", - "collaborators", "status", "user", - "identification", "study_description", "dataset_team", "redcap", diff --git a/apis/dataset_metadata/dataset_data_management.py b/apis/dataset_metadata/dataset_data_management.py index 7c8a2089..2cea865e 100644 --- a/apis/dataset_metadata/dataset_data_management.py +++ b/apis/dataset_metadata/dataset_data_management.py @@ -163,7 +163,7 @@ def post(self, study_id: int, dataset_id: int): ], }, }, - "required": [], + 
"required": ["consent", "subjects", "deident"], } try: validate(instance=request.json, schema=schema) diff --git a/apis/study_metadata/study_collaborators.py b/apis/study_metadata/study_collaborators.py deleted file mode 100644 index 296a5acb..00000000 --- a/apis/study_metadata/study_collaborators.py +++ /dev/null @@ -1,107 +0,0 @@ -"""API routes for study collaborators metadata""" - -from flask import Response -from flask_restx import Resource - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -# -# study_collaborators = api.model( -# "StudyCollaborators", -# { -# "id": fields.String(required=True), -# "name": fields.String(required=True), -# "identifier": fields.String(required=True), -# "scheme": fields.String(required=True), -# "scheme_uri": fields.String(required=True), -# }, -# ) - -# -# @api.route("/study//metadata/collaborators") -# class StudyCollaboratorsResource(Resource): -# """Study Collaborators Metadata""" -# -# @api.doc("collaborators") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_collaborators) -# def get(self, study_id: int): -# """Get study collaborators metadata""" -# study_ = model.Study.query.get(study_id) -# study_collaborators_ = study_.study_collaborators -# -# return [collab.to_dict() for collab in study_collaborators_], 200 -# -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def post(self, study_id: int): -# """updating study collaborators""" -# # Schema validation -# schema = { -# "type": "array", -# "additionalProperties": False, -# "items": { -# "type": "object", -# "properties": { -# "id": {"type": "string"}, -# "name": {"type": "string"}, -# "identifier": {"type": "string"}, -# "identifier_scheme": {"type": "string"}, -# "identifier_scheme_uri": {"type": "string"}, -# }, -# "required": [ -# "name", -# "identifier", -# "identifier_scheme", -# ], -# }, -# } -# try: -# validate(request.json, 
schema) -# except ValidationError as e: -# return e.message, 400 -# -# data: typing.Union[dict, typing.Any] = request.json -# -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# list_of_elements = [] -# for i in data: -# if "id" in i and i["id"]: -# study_collaborators_ = model.StudyCollaborators.query.get(i["id"]) -# study_collaborators_.update(i) -# else: -# study_collaborators_ = model.StudyCollaborators.from_data(study_obj, i) -# model.db.session.add(study_collaborators_) -# list_of_elements.append(study_collaborators_.to_dict()) -# model.db.session.commit() -# -# return list_of_elements, 201 -# - - -@api.route("/study//metadata/collaborators/") -class StudyLocationUpdate(Resource): - """delete Study Collaborators Metadata""" - - @api.doc("delete study collaborators") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, collaborator_id: int): - """Delete study collaborators metadata""" - study_obj = model.Study.query.get(study_id) - if not is_granted("study_metadata", study_obj): - return "Access denied, you can not delete study", 403 - study_collaborators_ = model.StudyCollaborators.query.get(collaborator_id) - - model.db.session.delete(study_collaborators_) - - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_conditions.py b/apis/study_metadata/study_conditions.py deleted file mode 100644 index c812a07d..00000000 --- a/apis/study_metadata/study_conditions.py +++ /dev/null @@ -1,91 +0,0 @@ -"""API routes for study other metadata""" - -from flask import Response -from flask_restx import Resource - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -# @api.route("/study//metadata/conditions") -# class StudyCondition(Resource): -# """Study Conditions Metadata""" -# -# @api.doc("conditions") -# 
@api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, study_id: int): -# """Get study conditions metadata""" -# study_ = model.Study.query.get(study_id) -# -# study_conditions = study_.study_conditions -# -# return [s.to_dict() for s in study_conditions], 200 -# -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def post(self, study_id: int): -# """Create study condition metadata""" -# # Schema validation -# schema = { -# "type": "array", -# "additionalProperties": False, -# "items": { -# "type": "object", -# "properties": { -# "id": {"type": "string"}, -# "name": {"type": "string", "minLength": 1}, -# "classification_code": {"type": "string"}, -# "scheme": {"type": "string"}, -# "scheme_uri": {"type": "string"}, -# "condition_uri": {"type": "string"}, -# }, -# "required": ["name", "classification_code", "condition_uri"], -# }, -# } -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# data: typing.Union[dict, typing.Any] = request.json -# list_of_elements = [] -# for i in data: -# if "id" in i and i["id"]: -# study_conditions_ = model.StudyConditions.query.get(i["id"]) -# if not study_conditions_: -# return f"Study condition {i['id']} Id is not found", 404 -# study_conditions_.update(i) -# list_of_elements.append(study_conditions_.to_dict()) -# elif "id" not in i or not i["id"]: -# study_conditions_ = model.StudyConditions.from_data(study_obj, i) -# model.db.session.add(study_conditions_) -# list_of_elements.append(study_conditions_.to_dict()) -# model.db.session.commit() -# return list_of_elements, 201 - - -@api.route("/study//metadata/conditions/") -class StudyConditionsUpdate(Resource): - """Study Conditions Metadata update""" - - @api.doc("Delete Study 
Identifications") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, condition_id: int): - """Delete study conditions metadata""" - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - - study_conditions_ = model.StudyConditions.query.get(condition_id) - - model.db.session.delete(study_conditions_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index a47ca2b2..7641c93f 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -2,7 +2,7 @@ import typing -from flask import request +from flask import Response, request from flask_restx import Resource, fields from jsonschema import ValidationError, validate @@ -12,48 +12,82 @@ from ..authentication import is_granted study_description = api.model( - "StudyDescription", + "StudyMetadataDescription", { - "id": fields.String(required=True), - "brief_summary": fields.String(required=True), - "detailed_description": fields.String(required=True), - }, -) - -study_other = api.model( - "StudyConditions", - { - "id": fields.String(required=True), - "name": fields.Boolean(required=True), - "classification_code": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "condition_uri": fields.String(required=True), - }, -) - -study_keywords = api.model( - "StudyKeywords", - { - "id": fields.String(required=True), - "name": fields.Boolean(required=True), - "classification_code": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "keyword_uri": fields.String(required=True), - }, -) - - -study_identification = api.model( - "StudyIdentification", - { - "id": fields.String(required=True), - "identifier": 
fields.String(required=True), - "identifier_type": fields.String(required=True), - "identifier_domain": fields.String(required=True), - "identifier_link": fields.String(required=True), - "secondary": fields.Boolean(required=True), + "description": fields.Nested( + api.model( + "StudyDescription", + { + "id": fields.String(required=True), + "brief_summary": fields.String(required=True), + "detailed_description": fields.String(required=True), + }, + ) + ), + "conditions": fields.List( + fields.Nested( + api.model( + "StudyConditions", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "condition_uri": fields.String(required=True), + }, + ) + ) + ), + "keywords": fields.List( + fields.Nested( + api.model( + "StudyKeywords", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "classification_code": fields.String(required=True), + "scheme": fields.String(required=True), + "scheme_uri": fields.String(required=True), + "keyword_uri": fields.String(required=True), + }, + ) + ) + ), + "identification": fields.Nested( + api.model( + "StudyIdentification", + { + "primary": fields.Nested( + api.model( + "PrimaryIdentification", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "identifier_domain": fields.String(required=True), + "identifier_link": fields.String(required=True), + }, + ) + ), + "secondary": fields.List( + fields.Nested( + api.model( + "SecondaryIdentification", + { + "id": fields.String(required=True), + "identifier": fields.String(required=True), + "identifier_type": fields.String(required=True), + "identifier_domain": fields.String(required=True), + "identifier_link": fields.String(required=True), + }, + ) + ), + required=True, + ), + }, + ) + ), }, ) @@ -65,7 +99,7 
@@ class StudyDescriptionResource(Resource): @api.doc("description") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(study_description) + @api.marshal_with(study_description) def get(self, study_id: int): """Get study description metadata""" study_ = model.Study.query.get(study_id) @@ -74,21 +108,23 @@ def get(self, study_id: int): study_conditions = study_.study_conditions study_description_ = study_.study_description return { - "identification": identifiers.to_dict(), "keywords": [k.to_dict() for k in study_keywords], "conditions": [c.to_dict() for c in study_conditions], "description": study_description_.to_dict(), + "identification": identifiers.to_dict(), + }, 200 @api.response(200, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_description) def post(self, study_id: int): """Update study description metadata""" # Schema validation schema = { "type": "object", "additionalProperties": False, - "required": [], + "required": ["conditions", "keywords", "description", "identification"], "properties": { "conditions": { "type": "array", @@ -222,3 +258,68 @@ def post(self, study_id: int): "keywords": list_of_keywords, "identification": final_identifiers.to_dict(), }, 201 + + +@api.route("/study//metadata/keywords/") +class StudyKeywordsDelete(Resource): + """Study keywords Metadata update""" + + @api.doc("Delete Study Keywords") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, keyword_id: int): + """Delete study conditions metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + + study_keywords_ = model.StudyKeywords.query.get(keyword_id) + + model.db.session.delete(study_keywords_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//metadata/conditions/") +class StudyConditionsUpdate(Resource): + """Study 
Conditions Metadata update""" + + @api.doc("Delete Study Identifications") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, condition_id: int): + """Delete study conditions metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + + study_conditions_ = model.StudyConditions.query.get(condition_id) + + model.db.session.delete(study_conditions_) + model.db.session.commit() + + return Response(status=204) + + +@api.route("/study//metadata/identification/") +class StudyIdentificationdDelete(Resource): + """Study Identification Metadata""" + + @api.doc("Delete Study Identifications") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, identification_id: int): + """Delete study identification metadata""" + study = model.Study.query.get(study_id) + if not is_granted("study_metadata", study): + return "Access denied, you can not delete study", 403 + + study_identification_ = model.StudyIdentification.query.get(identification_id) + if not study_identification_.secondary: + return "primary identifier can not be deleted", 400 + + model.db.session.delete(study_identification_) + model.db.session.commit() + + return Response(status=204) diff --git a/apis/study_metadata/study_design.py b/apis/study_metadata/study_design.py index 8a8fb0f8..ee5e2f63 100644 --- a/apis/study_metadata/study_design.py +++ b/apis/study_metadata/study_design.py @@ -55,6 +55,7 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_design) def put(self, study_id: int): """Update study design metadata""" # Schema validation diff --git a/apis/study_metadata/study_eligibility.py b/apis/study_metadata/study_eligibility.py index 7d2bebf7..32b1bc36 100644 --- a/apis/study_metadata/study_eligibility.py +++ 
b/apis/study_metadata/study_eligibility.py @@ -47,6 +47,7 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_eligibility) def put(self, study_id: int): """Update study eligibility metadata""" # Schema validation diff --git a/apis/study_metadata/study_identification.py b/apis/study_metadata/study_identification.py deleted file mode 100644 index 2ff70816..00000000 --- a/apis/study_metadata/study_identification.py +++ /dev/null @@ -1,121 +0,0 @@ -"""API routes for study identification metadata""" - -from flask import Response -from flask_restx import Resource - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -# @api.route("/study//metadata/identification") -# class StudyIdentificationResource(Resource): -# """Study Identification Metadata""" -# -# @api.doc("identification") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.param("id", "The study identifier") -# # @api.marshal_with(study_identification) -# def get(self, study_id: int): -# """Get study identification metadata""" -# study_ = model.Study.query.get(study_id) -# identifiers = model.Identifiers(study_) -# return identifiers.to_dict(), 200 -# -# @api.doc("identification add") -# @api.response(201, "Success") -# @api.response(400, "Validation Error") -# @api.expect(study_identification) -# def post(self, study_id: int): -# """Create study identification metadata""" -# # Schema validation -# schema = { -# "type": "object", -# "additionalProperties": False, -# "properties": { -# "primary": { -# "type": "object", -# "additionalProperties": False, -# "properties": { -# "identifier": {"type": "string", "minLength": 1}, -# "identifier_type": { -# "type": "string", -# "minLength": 1, -# }, -# "identifier_domain": { -# "type": "string", -# }, -# "identifier_link": { -# "type": "string", -# }, -# }, -# }, -# "secondary": { -# "type": "array", -# }, -# 
}, -# } -# -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 -# -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# data: typing.Union[dict, typing.Any] = request.json -# identifiers = [i for i in study_obj.study_identification if not i.secondary] -# primary_identifier = identifiers[0] if len(identifiers) else None -# -# primary: dict = data["primary"] -# -# if primary_identifier: -# primary_identifier.update(primary) -# else: -# study_identification_ = model.StudyIdentification.from_data( -# study_obj, primary, False -# ) -# model.db.session.add(study_identification_) -# -# for i in data["secondary"]: -# i["secondary"] = True -# if "id" in i and i["id"]: -# study_identification_ = model.StudyIdentification.query.get(i["id"]) -# study_identification_.update(i) -# else: -# study_identification_ = model.StudyIdentification.from_data( -# study_obj, i, True -# ) -# model.db.session.add(study_identification_) -# -# model.db.session.commit() -# -# final_identifiers = model.Identifiers(study_obj) -# -# return final_identifiers.to_dict(), 201 - - -@api.route("/study//metadata/identification/") -class StudyIdentificationdDelete(Resource): - """Study Identification Metadata""" - - @api.doc("Delete Study Identifications") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, identification_id: int): - """Delete study identification metadata""" - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - - study_identification_ = model.StudyIdentification.query.get(identification_id) - if not study_identification_.secondary: - return "primary identifier can not be deleted", 400 - - model.db.session.delete(study_identification_) - model.db.session.commit() - - return Response(status=204) 
diff --git a/apis/study_metadata/study_intervention.py b/apis/study_metadata/study_intervention.py index fb3641a6..bb76984f 100644 --- a/apis/study_metadata/study_intervention.py +++ b/apis/study_metadata/study_intervention.py @@ -41,11 +41,11 @@ def get(self, study_id: int): sorted_study_intervention = sorted( study_intervention_, key=lambda x: x.created_at ) - return [s.to_dict() for s in sorted_study_intervention], 200 @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_intervention) def post(self, study_id: int): """Create study intervention metadata""" # Schema validation diff --git a/apis/study_metadata/study_keywords.py b/apis/study_metadata/study_keywords.py deleted file mode 100644 index 27f90ea7..00000000 --- a/apis/study_metadata/study_keywords.py +++ /dev/null @@ -1,92 +0,0 @@ -"""API routes for study other metadata""" - -from flask import Response -from flask_restx import Resource - -import model -from apis.study_metadata_namespace import api - -from ..authentication import is_granted - -# -# @api.route("/study//metadata/keywords") -# class StudyKeywords(Resource): -# """Study Keywords Metadata""" -# -# @api.doc("keywords") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, study_id: int): -# """Get study keywords metadata""" -# study_ = model.Study.query.get(study_id) -# study_keywords = study_.study_keywords -# -# return [k.to_dict() for k in study_keywords], 200 -# -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def post(self, study_id: int): -# """Create study keywords metadata""" -# # Schema validation -# schema = { -# "type": "array", -# "additionalProperties": False, -# "items": { -# "type": "object", -# "properties": { -# "id": {"type": "string"}, -# "name": {"type": "string", "minLength": 1}, -# "classification_code": {"type": "string"}, -# "scheme": {"type": "string"}, -# "scheme_uri": {"type": 
"string"}, -# "keyword_uri": {"type": "string"}, -# }, -# "required": ["name", "classification_code", "keyword_uri"], -# }, -# } -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 -# -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# data: typing.Union[dict, typing.Any] = request.json -# list_of_elements = [] -# for i in data: -# if "id" in i and i["id"]: -# study_keywords_ = model.StudyKeywords.query.get(i["id"]) -# if not study_keywords_: -# return f"Study keywords {i['id']} Id is not found", 404 -# study_keywords_.update(i) -# list_of_elements.append(study_keywords_.to_dict()) -# elif "id" not in i or not i["id"]: -# study_keywords_ = model.StudyKeywords.from_data(study_obj, i) -# model.db.session.add(study_keywords_) -# list_of_elements.append(study_keywords_.to_dict()) -# model.db.session.commit() -# return list_of_elements, 201 - - -@api.route("/study//metadata/keywords/") -class StudyKeywordsDelete(Resource): - """Study keywords Metadata update""" - - @api.doc("Delete Study Keywords") - @api.response(204, "Success") - @api.response(400, "Validation Error") - def delete(self, study_id: int, keyword_id: int): - """Delete study conditions metadata""" - study = model.Study.query.get(study_id) - if not is_granted("study_metadata", study): - return "Access denied, you can not delete study", 403 - - study_keywords_ = model.StudyKeywords.query.get(keyword_id) - - model.db.session.delete(study_keywords_) - model.db.session.commit() - - return Response(status=204) diff --git a/apis/study_metadata/study_location.py b/apis/study_metadata/study_location.py index b017e235..73f860b6 100644 --- a/apis/study_metadata/study_location.py +++ b/apis/study_metadata/study_location.py @@ -46,6 +46,7 @@ def get(self, study_id: int): @api.response(201, "Success") @api.response(400, "Validation Error") + 
@api.marshal_with(study_location) def post(self, study_id: int): """Create study location metadata""" # Schema validation diff --git a/apis/study_metadata/study_other.py b/apis/study_metadata/study_other.py deleted file mode 100644 index 5355004d..00000000 --- a/apis/study_metadata/study_other.py +++ /dev/null @@ -1,256 +0,0 @@ -# """API routes for study other metadata""" -# -# import typing -# -# from flask import request, Response -# from flask_restx import Resource, fields -# from jsonschema import ValidationError, validate -# -# import model -# from apis.study_metadata_namespace import api -# -# from ..authentication import is_granted -# -# study_other = api.model( -# "StudyOther", -# { -# "id": fields.String(required=True), -# "oversight_has_dmc": fields.Boolean(required=True), -# "conditions": fields.String(required=True), -# "keywords": fields.String(required=True), -# "size": fields.String(required=True), -# }, -# ) -# -# -# @api.route("/study//metadata/oversight") -# class StudyOversightResource(Resource): -# """Study Oversight Metadata""" -# -# @api.doc("oversight") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, study_id: int): -# """Get study oversight metadata""" -# study_ = model.Study.query.get(study_id) -# -# study_oversight_has_dmc = study_.study_oversight -# return study_oversight_has_dmc.to_dict(), 200 -# -# def put(self, study_id: int): -# """Update study oversight metadata""" -# # Schema validation -# schema = { -# "type": "object", -# "additionalProperties": False, -# "properties": {"oversight_has_dmc": {"type": "boolean"}}, -# "required": ["has_dmc"], -# } -# -# try: -# validate(request.json, schema) -# except ValidationError as e: -# return e.message, 400 -# -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# data: typing.Union[dict, typing.Any] = 
request.json -# study_oversight_ = study_obj.study_oversight.update(data) -# model.db.session.commit() -# return study_obj.study_oversight.to_dict(), 200 - - -# @api.route("/study//metadata/conditions") -# class StudyCondition(Resource): -# """Study Conditions Metadata""" -# -# @api.doc("conditions") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, study_id: int): -# """Get study conditions metadata""" -# study_ = model.Study.query.get(study_id) -# -# study_conditions = study_.study_conditions -# -# return [s.to_dict() for s in study_conditions], 200 -# -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def post(self, study_id: int): -# """Create study condition metadata""" -# # Schema validation -# # schema = { -# # "type": "array", -# # "additionalProperties": False, -# # "items": { -# # "type": "object", -# # "properties": { -# # "id": {"type": "string"}, -# # "facility": {"type": "string", "minLength": 1}, -# # "status": { -# # "type": "string", -# # "enum": [ -# # "Withdrawn", -# # "Recruiting", -# # "Active, not recruiting", -# # "Not yet recruiting", -# # "Suspended", -# # "Enrolling by invitation", -# # "Completed", -# # "Terminated", -# # ], -# # }, -# # "city": {"type": "string", "minLength": 1}, -# # "state": {"type": "string"}, -# # "zip": {"type": "string"}, -# # "country": {"type": "string", "minLength": 1}, -# # }, -# # "required": ["facility", "status", "city", "country"], -# # }, -# # } -# # -# # try: -# # validate(request.json, schema) -# # except ValidationError as e: -# # return e.message, 400 -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# data: typing.Union[dict, typing.Any] = request.json -# list_of_elements = [] -# for i in data: -# if "id" in i and i["id"]: -# study_conditions_ = model.StudyConditions.query.get(i["id"]) -# if 
not study_conditions_: -# return f"Study condition {i['id']} Id is not found", 404 -# study_conditions_.update(i) -# list_of_elements.append(study_conditions_.to_dict()) -# elif "id" not in i or not i["id"]: -# study_conditions_ = model.StudyConditions.from_data(study_obj, i) -# model.db.session.add(study_conditions_) -# list_of_elements.append(study_conditions_.to_dict()) -# model.db.session.commit() -# return list_of_elements, 201 -# -# -# @api.route("/study//metadata/conditions/") -# class StudyConditionsUpdate(Resource): -# """Study Conditions Metadata update""" -# -# @api.doc("Delete Study Identifications") -# @api.response(204, "Success") -# @api.response(400, "Validation Error") -# def delete(self, study_id: int, condition_id: int): -# """Delete study conditions metadata""" -# study = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study): -# return "Access denied, you can not delete study", 403 -# -# study_conditions_ = model.StudyConditions.query.get(condition_id) -# -# model.db.session.delete(study_conditions_) -# model.db.session.commit() -# -# return Response(status=204) - - -# @api.route("/study//metadata/keywords") -# class StudyKeywords(Resource): -# """Study Keywords Metadata""" -# -# @api.doc("keywords") -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# # @api.marshal_with(study_other) -# def get(self, study_id: int): -# """Get study keywords metadata""" -# study_ = model.Study.query.get(study_id) -# -# study_keywords = study_.study_keywords -# -# return [k.to_dict() for k in study_keywords], 200 -# -# @api.response(200, "Success") -# @api.response(400, "Validation Error") -# def post(self, study_id: int): -# """Create study condition metadata""" -# # Schema validation -# # schema = { -# # "type": "array", -# # "additionalProperties": False, -# # "items": { -# # "type": "object", -# # "properties": { -# # "id": {"type": "string"}, -# # "facility": {"type": "string", "minLength": 1}, -# # "status": 
{ -# # "type": "string", -# # "enum": [ -# # "Withdrawn", -# # "Recruiting", -# # "Active, not recruiting", -# # "Not yet recruiting", -# # "Suspended", -# # "Enrolling by invitation", -# # "Completed", -# # "Terminated", -# # ], -# # }, -# # "city": {"type": "string", "minLength": 1}, -# # "state": {"type": "string"}, -# # "zip": {"type": "string"}, -# # "country": {"type": "string", "minLength": 1}, -# # }, -# # "required": ["facility", "status", "city", "country"], -# # }, -# # } -# # -# # try: -# # validate(request.json, schema) -# # except ValidationError as e: -# # return e.message, 400 -# study_obj = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study_obj): -# return "Access denied, you can not modify study", 403 -# -# data: typing.Union[dict, typing.Any] = request.json -# list_of_elements = [] -# for i in data: -# if "id" in i and i["id"]: -# study_keywords_ = model.StudyKeywords.query.get(i["id"]) -# if not study_keywords_: -# return f"Study keywords {i['id']} Id is not found", 404 -# study_keywords_.update(i) -# list_of_elements.append(study_keywords_.to_dict()) -# elif "id" not in i or not i["id"]: -# study_keywords_ = model.StudyKeywords.from_data(study_obj, i) -# model.db.session.add(study_keywords_) -# list_of_elements.append(study_keywords_.to_dict()) -# model.db.session.commit() -# return list_of_elements, 201 -# -# -# @api.route("/study//metadata/keywords/") -# class StudyKeywordsDelete(Resource): -# """Study keywords Metadata update""" -# -# @api.doc("Delete Study Keywords") -# @api.response(204, "Success") -# @api.response(400, "Validation Error") -# def delete(self, study_id: int, keyword_id: int): -# """Delete study conditions metadata""" -# study = model.Study.query.get(study_id) -# if not is_granted("study_metadata", study): -# return "Access denied, you can not delete study", 403 -# -# study_keywords_ = model.StudyKeywords.query.get(keyword_id) -# -# model.db.session.delete(study_keywords_) -# 
model.db.session.commit() -# -# return Response(status=204) diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index 6a11c576..ec3a2e9f 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -15,9 +15,17 @@ "StudyOverallOfficial", { "id": fields.String(required=True), - "name": fields.String(required=True), + "first_name": fields.String(required=True), + "last_name": fields.String(required=True), + "identifier": fields.String(required=False), + "identifier_scheme": fields.String(required=False), + "identifier_scheme_uri": fields.String(required=False), "affiliation": fields.String(required=True), - "role": fields.String(required=True), + "affiliation_identifier": fields.String(required=True), + "affiliation_identifier_scheme": fields.String(required=False), + "affiliation_identifier_scheme_uri": fields.String(required=False), + "role": fields.String(required=True), # Allows null in JSON Schema but RESTx doesn't support nullable fields + "degree": fields.String(required=False), }, ) @@ -30,7 +38,7 @@ class StudyOverallOfficialResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - # @api.marshal_with(study_overall_official) + @api.marshal_with(study_overall_official) def get(self, study_id: int): """Get study overall official metadata""" study_ = model.Study.query.get(study_id) @@ -47,6 +55,7 @@ def get(self, study_id: int): @api.response(201, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_overall_official) def post(self, study_id: int): """Create study overall official metadata""" # Schema validation diff --git a/apis/study_metadata/study_oversight.py b/apis/study_metadata/study_oversight.py index 179f4384..9b7e008c 100644 --- a/apis/study_metadata/study_oversight.py +++ b/apis/study_metadata/study_oversight.py @@ -14,11 +14,10 @@ study_other = 
api.model( "StudyOversight", { - "id": fields.String(required=True), - "oversight_has_dmc": fields.Boolean(required=True), - "conditions": fields.String(required=True), - "keywords": fields.String(required=True), - "size": fields.String(required=True), + "has_dmc": fields.String(required=True), + "fda_regulated_drug": fields.String(required=True), + "fda_regulated_device": fields.String(required=True), + "human_subject_review_status": fields.String(required=True), }, ) @@ -30,7 +29,7 @@ class StudyOversightResource(Resource): @api.doc("oversight") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with(study_other) + @api.marshal_with(study_other) def get(self, study_id: int): """Get study oversight metadata""" study_ = model.Study.query.get(study_id) @@ -38,6 +37,9 @@ def get(self, study_id: int): study_oversight_has_dmc = study_.study_oversight return study_oversight_has_dmc.to_dict(), 200 + @api.response(200, "Success") + @api.response(400, "Validation Error") + @api.marshal_with(study_other) def put(self, study_id: int): """Update study oversight metadata""" # Schema validation diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index d0edbfce..41a7c51e 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -44,6 +44,7 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + @api.marshal_with(study_status) def put(self, study_id: int): """Update study status metadata""" # Schema validation diff --git a/apis/study_metadata/study_team.py b/apis/study_metadata/study_team.py index fc3c5697..ce22f2e3 100644 --- a/apis/study_metadata/study_team.py +++ b/apis/study_metadata/study_team.py @@ -2,54 +2,63 @@ import typing -from flask import request +from flask import Response, request from flask_restx import Resource, fields import model from apis.study_metadata_namespace import api +from jsonschema import 
ValidationError, validate from ..authentication import is_granted -study_sponsors = api.model( - "StudySponsors", +study_team_metadata = api.model( + "StudyTeamMetadata", { - "responsible_party_type": fields.String(required=False), - "responsible_party_investigator_first_name": fields.String(required=True), - "responsible_party_investigator_last_name": fields.String(required=True), - "responsible_party_investigator_title": fields.String(required=True), - "responsible_party_investigator_identifier_value": fields.String(required=True), - "responsible_party_investigator_identifier_scheme": fields.String( - required=True + "sponsors": fields.Nested( # Changed to Nested to make it a single object + api.model( + "StudySponsors", + { + "responsible_party_type": fields.String( + required=True, + enum=[ + "Sponsor", + "Principal Investigator", + "Sponsor-Investigator", + ] + ), + "responsible_party_investigator_first_name": fields.String(required=True), + "responsible_party_investigator_last_name": fields.String(required=True), + "responsible_party_investigator_title": fields.String(required=True), + "lead_sponsor_name": fields.String(required=True), + "responsible_party_investigator_identifier_value": fields.String(required=False), + "responsible_party_investigator_identifier_scheme": fields.String(required=False), + "responsible_party_investigator_identifier_scheme_uri": fields.String(required=False), + "responsible_party_investigator_affiliation_name": fields.String(required=False), + "responsible_party_investigator_affiliation_identifier_scheme": fields.String(required=False), + "responsible_party_investigator_affiliation_identifier_value": fields.String(required=False), + "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String(required=False), + "lead_sponsor_identifier": fields.String(required=False), + "lead_sponsor_identifier_scheme": fields.String(required=False), + "lead_sponsor_identifier_scheme_uri": fields.String(required=False), + } + 
) ), - "responsible_party_investigator_identifier_scheme_uri": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_name": fields.String(required=True), - "responsible_party_investigator_affiliation_identifier_scheme": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_identifier_value": fields.String( - required=True - ), - "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String( - required=True - ), - "lead_sponsor_name": fields.String(required=True), - "lead_sponsor_identifier": fields.String(required=True), - "lead_sponsor_identifier_scheme": fields.String(required=True), - "lead_sponsor_identifier_scheme_uri": fields.String(required=True), - }, -) -study_collaborators = api.model( - "StudyCollaborators", - { - "id": fields.String(required=True), - "name": fields.String(required=True), - "identifier": fields.String(required=True), - "scheme": fields.String(required=True), - "scheme_uri": fields.String(required=True), - "created_at": fields.Integer(required=True), + "collaborators": fields.List( + fields.Nested( + api.model( + "StudyCollaborators", + { + "id": fields.String(required=True), + "name": fields.String(required=True), + "identifier": fields.String(required=False), + "identifier_scheme": fields.String(required=False), + "identifier_scheme_uri": fields.String(required=False), + "created_at": fields.Integer(required=True), + }, + ) + ) + ), }, ) @@ -61,12 +70,7 @@ class StudySponsorsResource(Resource): @api.doc("sponsors") @api.response(200, "Success") @api.response(400, "Validation Error") - # @api.marshal_with( - # { - # "sponsors": study_sponsors, - # "collaborators": study_collaborators - # } - # ) + @api.marshal_with(study_team_metadata) def get(self, study_id: int): """Get study team metadata""" study_ = model.Study.query.get(study_id) @@ -83,97 +87,98 @@ def get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") + 
@api.marshal_with(study_team_metadata) def post(self, study_id: int): """Update study team metadata""" # Schema validation - # schema = { - # "type": "object", - # "additionalProperties": False, - # "properties": { - # "collaborators": { - # "type": "array", - # "additionalProperties": False, - # "items": { - # "type": "object", - # "properties": { - # "id": {"type": "string"}, - # "name": {"type": "string"}, - # "identifier": {"type": "string"}, - # "identifier_scheme": {"type": "string"}, - # "identifier_scheme_uri": {"type": "string"}, - # }, - # "required": [ - # "name", - # "identifier", - # "identifier_scheme", - # ], - # }, - # }, - # "sponsors": - # { - # "type": "object", - # "additionalProperties": False, - # "properties": { - # "responsible_party_type": { - # "type": ["string", "null"], - # "enum": [ - # "Sponsor", - # "Principal Investigator", - # "Sponsor-Investigator", - # ], - # }, - # "responsible_party_investigator_first_name": { - # "type": "string", - # }, - # "responsible_party_investigator_last_name": { - # "type": "string", - # }, - # "responsible_party_investigator_title": { - # "type": "string", - # }, - # "responsible_party_investigator_identifier_value": { - # "type": "string", - # }, - # "responsible_party_investigator_identifier_scheme": { - # "type": "string", - # }, - # "responsible_party_investigator_identifier_scheme_uri": { - # "type": "string", - # }, - # "responsible_party_investigator_affiliation_name": { - # "type": "string", - # }, - # "responsible_party_investigator_affiliation_identifier_scheme": { - # "type": "string", - # }, - # "responsible_party_investigator_affiliation_identifier_value": { - # "type": "string", - # }, - # "responsible_party_investigator_affiliation_identifier_scheme_uri": { - # "type": "string", - # }, - # "lead_sponsor_name": {"type": "string"}, - # "lead_sponsor_identifier": {"type": "string"}, - # "lead_sponsor_identifier_scheme": {"type": "string"}, - # "lead_sponsor_identifier_scheme_uri": { - # 
"type": "string", - # }, - # }, - # "required": [ - # "responsible_party_type", - # "lead_sponsor_name", - # "responsible_party_investigator_last_name", - # "responsible_party_investigator_first_name", - # "responsible_party_investigator_title", - # ], - # } - # } - # } - # - # try: - # validate(request.json, schema) - # except ValidationError as e: - # return e.message, 400 + schema = { + "type": "object", + "additionalProperties": False, + "required": ["collaborators", "sponsors"], + "properties": { + "collaborators": { + "type": "array", + "additionalProperties": False, + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string"}, + "identifier": {"type": "string"}, + "identifier_scheme": {"type": "string"}, + "identifier_scheme_uri": {"type": "string"}, + }, + "required": [ + "name", + "identifier", + "identifier_scheme", + ], + }, + }, + "sponsors": { + "type": "object", + "additionalProperties": False, + "properties": { + "responsible_party_type": { + "type": ["string", "null"], + "enum": [ + "Sponsor", + "Principal Investigator", + "Sponsor-Investigator", + ], + }, + "responsible_party_investigator_first_name": { + "type": "string", + }, + "responsible_party_investigator_last_name": { + "type": "string", + }, + "responsible_party_investigator_title": { + "type": "string", + }, + "responsible_party_investigator_identifier_value": { + "type": "string", + }, + "responsible_party_investigator_identifier_scheme": { + "type": "string", + }, + "responsible_party_investigator_identifier_scheme_uri": { + "type": "string", + }, + "responsible_party_investigator_affiliation_name": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_scheme": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_value": { + "type": "string", + }, + "responsible_party_investigator_affiliation_identifier_scheme_uri": { + "type": "string", + }, + "lead_sponsor_name": {"type": 
"string"}, + "lead_sponsor_identifier": {"type": "string"}, + "lead_sponsor_identifier_scheme": {"type": "string"}, + "lead_sponsor_identifier_scheme_uri": { + "type": "string", + }, + }, + "required": [ + "responsible_party_type", + "lead_sponsor_name", + "responsible_party_investigator_last_name", + "responsible_party_investigator_first_name", + "responsible_party_investigator_title", + ], + }, + }, + } + + try: + validate(request.json, schema) + except ValidationError as e: + return e.message, 400 data: typing.Union[dict, typing.Any] = request.json if data["sponsors"]["responsible_party_type"] in [ @@ -229,3 +234,24 @@ def post(self, study_id: int): "collaborators": list_of_elements, "sponsors": study_.study_sponsors.to_dict(), }, 201 + + +@api.route("/study//metadata/collaborators/") +class StudyLocationUpdate(Resource): + """delete Study Collaborators Metadata""" + + @api.doc("delete study collaborators") + @api.response(204, "Success") + @api.response(400, "Validation Error") + def delete(self, study_id: int, collaborator_id: int): + """Delete study collaborators metadata""" + study_obj = model.Study.query.get(study_id) + if not is_granted("study_metadata", study_obj): + return "Access denied, you can not delete study", 403 + study_collaborators_ = model.StudyCollaborators.query.get(collaborator_id) + + model.db.session.delete(study_collaborators_) + + model.db.session.commit() + + return Response(status=204) diff --git a/tests/functional/test_050_study_metadata_api.py b/tests/functional/test_050_study_metadata_api.py index 14fa3225..c5eb3259 100644 --- a/tests/functional/test_050_study_metadata_api.py +++ b/tests/functional/test_050_study_metadata_api.py @@ -3222,7 +3222,6 @@ def test_get_overall_official_metadata(clients): admin_response_data = json.loads(admin_response.data) editor_response_data = json.loads(editor_response.data) viewer_response_data = json.loads(viewer_response.data) - assert response_data[0]["first_name"] == "test" assert 
response_data[0]["last_name"] == "test" assert response_data[0]["affiliation"] == "aff" From b07423e2e3651837177a7d1fc1199acdc813e382 Mon Sep 17 00:00:00 2001 From: Aydan <62059163+Aydawka@users.noreply.github.com> Date: Wed, 2 Apr 2025 13:01:19 -0700 Subject: [PATCH 489/505] feat: add reset password endpoints (#57) * feat: add forgot&reset endpoints * fix: update forgot password auth * feat: add test to forgot password * fix: add test to forgot password * chore: modify dependencies * chore: modify dependencies * fix: dependencies * fix: dependencies * feat: add token column * feat: add token column * feat: add token column * fix: change receiver and msg * fix: add forgot and reset password * fix: comment out forgot and reset password * style: format --- .../df71df391cdb_add_token_to_user.py | 26 +++ apis/authentication.py | 202 +++++++++++++++++- apis/study_metadata/study_description.py | 1 - apis/study_metadata/study_overall_official.py | 4 +- apis/study_metadata/study_team.py | 47 ++-- model/user.py | 7 +- modules/invitation.py | 52 +++-- pyproject.toml | 4 +- templates/forgot_password.html | 56 +++++ templates/reset_password.html | 56 +++++ tests/conftest.py | 4 + tests/functional/test_070_user.py | 154 +++++++++++++ 12 files changed, 575 insertions(+), 38 deletions(-) create mode 100644 alembic/versions/df71df391cdb_add_token_to_user.py create mode 100644 templates/forgot_password.html create mode 100644 templates/reset_password.html diff --git a/alembic/versions/df71df391cdb_add_token_to_user.py b/alembic/versions/df71df391cdb_add_token_to_user.py new file mode 100644 index 00000000..b0a530e1 --- /dev/null +++ b/alembic/versions/df71df391cdb_add_token_to_user.py @@ -0,0 +1,26 @@ +"""add token to user + +Revision ID: df71df391cdb +Revises: 3ffefbd9c03b +Create Date: 2024-07-03 10:15:49.657807 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import uuid + + +# revision identifiers, used by Alembic. 
+revision: str = 'df71df391cdb' +down_revision: Union[str, None] = '0ff53a655198' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + +password_reset_token = str(uuid.uuid4()) + +def upgrade() -> None: + op.add_column( + "user", sa.Column("password_reset_token", sa.String, nullable=True) + ) diff --git a/apis/authentication.py b/apis/authentication.py index fc6f5da3..1e05eb41 100644 --- a/apis/authentication.py +++ b/apis/authentication.py @@ -13,12 +13,14 @@ import jwt from email_validator import EmailNotValidError, validate_email -from flask import g, make_response, request +from flask import g, make_response, request, Response from flask_restx import Namespace, Resource, fields from jsonschema import FormatChecker, ValidationError, validate import model +# from modules.invitation import reset_password, forgot_password + # from modules.invitation import ( # send_email_verification, # ) @@ -647,3 +649,201 @@ def session_logout(): model.db.session.delete(session) model.db.session.commit() # return "Sessions are removed successfully", 200 + + +@api.route("/auth/forgot-password") +class ForgotPassword(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self): + """function is used to reset password in case users forget""" + + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + + config_module = importlib.import_module(config_module_name) + + if os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module + + def validate_is_valid_email(instance): + email_address = instance + try: + validate_email(email_address) + return True + except EmailNotValidError as e: + raise ValidationError("Invalid email address format") from e + + # 
Schema validation + schema = { + "type": "object", + "required": ["email_address"], + "additionalProperties": False, + "properties": { + "email_address": {"type": "string", "format": "valid_email"} + }, + } + + format_checker = FormatChecker() + format_checker.checks("valid_email")(validate_is_valid_email) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + return e.message, 400 + + data: Union[Any, dict] = request.json + email_address: str = data["email_address"] + + user = model.User.query.filter( + model.User.email_address == email_address + ).first() + + if not user: + raise ValidationError("User associated with this email does not exist") + + expired_in = get_now() + datetime.timedelta(minutes=5) + jti = str(uuid.uuid4()) + reset_token = jwt.encode( + { + "user": user.id, + "exp": expired_in, + "jti": jti, + "email": email_address, + }, # noqa: W503 + config.FAIRHUB_SECRET, + algorithm="HS256", + ) + # email_address = email_address if user else "" + # first_name = user.user_details.first_name if user else "" + # last_name = user.user_details.last_name if user else "" + + # if g.gb.is_on("email-verification"): + # if os.environ.get("FLASK_ENV") != "testing": + # forgot_password(email_address, first_name, last_name, reset_token) + user.update_password_reset(reset_token) + model.db.session.commit() + + response = make_response("email is sent successfully", 200) + if os.environ.get("FLASK_ENV") == "testing": + response.headers.add("X-Token", reset_token) + return response + + +@api.route("/auth/reset-password") +class ResetPassword(Resource): + @api.response(200, "Success") + @api.response(400, "Validation Error") + def post(self): + """function is used to reset password in case users forget""" + if os.environ.get("FLASK_ENV") == "testing": + config_module_name = "pytest_config" + else: + config_module_name = "config" + + config_module = importlib.import_module(config_module_name) + + if 
os.environ.get("FLASK_ENV") == "testing": + # If testing, use the 'TestConfig' class for accessing 'secret' + config = config_module.TestConfig + else: + # If not testing, directly use the 'config' module + config = config_module + + data: Union[Any, dict] = request.json + + try: + decoded = jwt.decode( + data["token"], config.FAIRHUB_SECRET, algorithms=["HS256"] + ) + except (jwt.ExpiredSignatureError, jwt.DecodeError, jwt.InvalidSignatureError): + return Response(status=401) + user = model.User.query.filter( + model.User.email_address == decoded["email"] + ).first() + if not user: + raise ValidationError("Email doesnt exist") + + if data["token"] != user.password_reset_token: + return "Invalid token", 400 + + validate_pass = user.check_password(data["new_password"]) + if validate_pass: + return "old and new password can not be same. Please select a new one", 422 + + def confirm_new_password(instance): + new_password = data["new_password"] + confirm_password = instance + + if new_password != confirm_password: + raise ValidationError("New password and confirm password do not match") + + return True + + schema = { + "type": "object", + "required": ["new_password", "confirm_password", "token"], + "additionalProperties": False, + "properties": { + "new_password": {"type": "string", "minLength": 1}, + "token": {"type": "string", "minLength": 1}, + "confirm_password": { + "type": "string", + "minLength": 1, + "format": "password confirmation", + }, + }, + } + format_checker = FormatChecker() + + # format_checker.checks("current password")(validate_current_password) + format_checker.checks("password confirmation")(confirm_new_password) + + try: + validate( + instance=request.json, schema=schema, format_checker=format_checker + ) + except ValidationError as e: + return e.message, 400 + + user.set_password(data["new_password"]) + model.db.session.commit() + + user.update_password_reset(None) + model.db.session.commit() + + # email_address = user.email_address if user else 
"" + # first_name = user.user_details.first_name if user else "" + # last_name = user.user_details.last_name if user else "" + # if os.environ.get("FLASK_ENV") != "testing": + # if g.gb.is_on("email-verification"): + # if user: + # reset_password( + # email_address, + # first_name, + # last_name, + # ) + + return "Password reset successfully", 200 + + +frozen_date: Union[datetime.datetime, None] = None + + +def set_now(now: Union[datetime.datetime, None]) -> None: + global frozen_date + frozen_date = now + + +def get_now() -> datetime.datetime: + if frozen_date: + return frozen_date + return datetime.datetime.now(datetime.timezone.utc) diff --git a/apis/study_metadata/study_description.py b/apis/study_metadata/study_description.py index 7641c93f..12ca0fa0 100644 --- a/apis/study_metadata/study_description.py +++ b/apis/study_metadata/study_description.py @@ -112,7 +112,6 @@ def get(self, study_id: int): "conditions": [c.to_dict() for c in study_conditions], "description": study_description_.to_dict(), "identification": identifiers.to_dict(), - }, 200 @api.response(200, "Success") diff --git a/apis/study_metadata/study_overall_official.py b/apis/study_metadata/study_overall_official.py index ec3a2e9f..e97181eb 100644 --- a/apis/study_metadata/study_overall_official.py +++ b/apis/study_metadata/study_overall_official.py @@ -24,7 +24,9 @@ "affiliation_identifier": fields.String(required=True), "affiliation_identifier_scheme": fields.String(required=False), "affiliation_identifier_scheme_uri": fields.String(required=False), - "role": fields.String(required=True), # Allows null in JSON Schema but RESTx doesn't support nullable fields + "role": fields.String( + required=True + ), # Allows null in JSON Schema but RESTx doesn't support nullable fields "degree": fields.String(required=False), }, ) diff --git a/apis/study_metadata/study_team.py b/apis/study_metadata/study_team.py index ce22f2e3..176beea8 100644 --- a/apis/study_metadata/study_team.py +++ 
b/apis/study_metadata/study_team.py @@ -4,10 +4,10 @@ from flask import Response, request from flask_restx import Resource, fields +from jsonschema import ValidationError, validate import model from apis.study_metadata_namespace import api -from jsonschema import ValidationError, validate from ..authentication import is_granted @@ -24,26 +24,45 @@ "Sponsor", "Principal Investigator", "Sponsor-Investigator", - ] + ], + ), + "responsible_party_investigator_first_name": fields.String( + required=True + ), + "responsible_party_investigator_last_name": fields.String( + required=True + ), + "responsible_party_investigator_title": fields.String( + required=True ), - "responsible_party_investigator_first_name": fields.String(required=True), - "responsible_party_investigator_last_name": fields.String(required=True), - "responsible_party_investigator_title": fields.String(required=True), "lead_sponsor_name": fields.String(required=True), - "responsible_party_investigator_identifier_value": fields.String(required=False), - "responsible_party_investigator_identifier_scheme": fields.String(required=False), - "responsible_party_investigator_identifier_scheme_uri": fields.String(required=False), - "responsible_party_investigator_affiliation_name": fields.String(required=False), - "responsible_party_investigator_affiliation_identifier_scheme": fields.String(required=False), - "responsible_party_investigator_affiliation_identifier_value": fields.String(required=False), - "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String(required=False), + "responsible_party_investigator_identifier_value": fields.String( + required=False + ), + "responsible_party_investigator_identifier_scheme": fields.String( + required=False + ), + "responsible_party_investigator_identifier_scheme_uri": fields.String( + required=False + ), + "responsible_party_investigator_affiliation_name": fields.String( + required=False + ), + 
"responsible_party_investigator_affiliation_identifier_scheme": fields.String( + required=False + ), + "responsible_party_investigator_affiliation_identifier_value": fields.String( + required=False + ), + "responsible_party_investigator_affiliation_identifier_scheme_uri": fields.String( + required=False + ), "lead_sponsor_identifier": fields.String(required=False), "lead_sponsor_identifier_scheme": fields.String(required=False), "lead_sponsor_identifier_scheme_uri": fields.String(required=False), - } + }, ) ), - "collaborators": fields.List( fields.Nested( api.model( diff --git a/model/user.py b/model/user.py index 42a79320..85351e51 100644 --- a/model/user.py +++ b/model/user.py @@ -25,6 +25,7 @@ def __init__(self, password): hash = db.Column(db.String, nullable=False) created_at = db.Column(db.BigInteger, nullable=False) email_verified = db.Column(db.BOOLEAN, nullable=True) + password_reset_token = db.Column(db.String, nullable=True) study_contributors = db.relationship("StudyContributor", back_populates="user") email_verification = db.relationship("EmailVerification", back_populates="user") @@ -82,9 +83,9 @@ def update(self, data): data["username"] if "username" in data else data["email_address"] ) # self.email_verified = data["email_verified"] - # self.username = data["username"] - # self.hash = data["hash"] - # self.created_at = data["created_at"] + + def update_password_reset(self, password_reset_token): + self.password_reset_token = password_reset_token def set_password(self, password: str): """setting bcrypt passwords""" diff --git a/modules/invitation.py b/modules/invitation.py index 6583a057..fd792a2f 100644 --- a/modules/invitation.py +++ b/modules/invitation.py @@ -71,22 +71,42 @@ def send_access_contributors(to, study, first_name, last_name, role): azure_email_connection(html_content, subject, to) -# def send_invitation_general(to, token): -# accept_url = f"{config.FAIRHUB_FRONTEND_URL}auth/signup?code={token}&email={to}" -# html_content = 
render_template( -# "accept_general_invitation.html", token=token, accept_url=accept_url, to=to -# ) -# subject, from_email, to = ( -# f"You have been invited to signup to FAIRhub", -# "aydan.gasimova2@example.com", -# to, -# ) -# -# # msg = EmailMessage(subject, html_content, from_email, [to]) -# # msg.content_subtype = "html" -# # msg.send() -# azure_email_connection(html_content, subject) -# +def forgot_password(to, first_name, last_name, token): + reset_password_ = f"{config.FAIRHUB_FRONTEND_URL}auth/reset-password?token={token}" + + html_content = render_template( + "forgot_password.html", + forgot_password_=reset_password_, + email=to, + first_name=first_name, + last_name=last_name + ) + subject, from_email, to = ( + f" Password Change", + "aydan.gasimova2@gmail.com", + to, + ) + azure_email_connection(html_content, subject, to) + + +def reset_password(to, first_name, last_name): + reset_password_ = f"{config.FAIRHUB_FRONTEND_URL}/user/profile" + + html_content = render_template( + "reset_password.html", + reset_password_=reset_password_, + email=to, + first_name=first_name, + last_name=last_name + ) + subject, from_email, to = ( + f" Password Change", + "aydan.gasimova2@gmail.com", + to, + ) + azure_email_connection(html_content, subject, to) + + def send_email_verification(email_address, token): verification_url = ( diff --git a/pyproject.toml b/pyproject.toml index 0a38562f..c0e2d658 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,6 @@ waitress = "^2.1.2" # Email flask-mail = "^0.9.1" flask-mailman = "^1.0.0" -azure-communication-email = "^1.0.0" # API Client requests = "^2.31.0" @@ -83,6 +82,7 @@ pandas = "^2.2.0" numpy = "^1.26.4" pycap = "^2.6.0" azure-storage-blob = "^12.19.1" +azure-communication-email = "^1.0.0" [tool.poetry.group.dev.dependencies] @@ -201,4 +201,4 @@ markers = [] [build-system] requires = ["poetry-core>=1.3.2"] -build-backend = "poetry.core.masonry.api" +build-backend = "poetry.core.masonry.api" \ No newline at end of 
file diff --git a/templates/forgot_password.html b/templates/forgot_password.html new file mode 100644 index 00000000..a5fc61ab --- /dev/null +++ b/templates/forgot_password.html @@ -0,0 +1,56 @@ + + + + Invitation + + + +
+
+ +
+

Hello {{ ' ' ~ email if email else '' }}!

+

Your password reset request has been received.

+

Please click the button to reset your password.

+ Reset password + +

Or use the following link: + + {{ forgot_password_ }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+
+ diff --git a/templates/reset_password.html b/templates/reset_password.html new file mode 100644 index 00000000..d31bb31a --- /dev/null +++ b/templates/reset_password.html @@ -0,0 +1,56 @@ + + + + Invitation + + + +
+
+ +
+

Hello {{ ' ' ~ email if email else '' }}!

+

We wanted to inform you that your password has been successfully changed.

+

If you did not make this change, please click here to reset your password immediately!

+ Reset password + +

Or see here: + + {{ reset_password_ }} + +

+

Need assistance or have questions? Our dedicated support team is here to help. Reach out to us at help@fairhub.io for prompt assistance.

+
+
Sincerely,
+
FAIRhub team
+
+
+ diff --git a/tests/conftest.py b/tests/conftest.py index 646c9e80..84370211 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -109,6 +109,9 @@ pytest.global_viewer_token = "" +pytest.global_reset_token = "" + + # Create the flask app for testing @pytest.fixture(scope="session") def flask_app(): @@ -319,6 +322,7 @@ def clients(flask_app): _admin_client = flask_app.test_client() _editor_client = flask_app.test_client() _viewer_client = flask_app.test_client() + # _test_client = flask_app.test_client() with unittest.mock.patch("pytest_config.TestConfig", TestConfig): response = _logged_in_client.post( diff --git a/tests/functional/test_070_user.py b/tests/functional/test_070_user.py index 2fc40617..067b5745 100644 --- a/tests/functional/test_070_user.py +++ b/tests/functional/test_070_user.py @@ -1,3 +1,6 @@ +import datetime + +from apis.authentication import set_now from model.db import db # ------------------- Password Change ------------------- # @@ -118,6 +121,157 @@ def test_post_login_new_password(clients): assert len(session_entries) == 1 +def test_post_reset_password(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + assert token is not None + + reset_response = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "Updatedpassword4testing!1", + "new_password": "Updatedpassword4testing!1", + }, + ) + assert reset_response.status_code == 200 + + response = _test_client.post( + "/auth/login", + json={ + "email_address": "test@fairhub.io", + "password": "Updatedpassword4testing!1", + }, + ) + + 
assert response.status_code == 200 + logout_response = _test_client.post("/auth/logout") + assert logout_response.status_code == 204 + + +def test_post_reset_password_invalidation(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + assert token is not None + + reset_response = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "invalidatepassword4testing!", + "new_password": "invalidatepassword4testing!", + }, + ) + + assert reset_response.status_code == 200 + + reset_response_old = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "uniquepassword4testing!", + "new_password": "uniquepassword4testing!", + }, + ) + + assert reset_response_old.status_code == 400 + + +def test_post_reset_password_is_not_same_old(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + assert token is not None + + reset_response_old = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "invalidatepassword4testing!", + "new_password": "invalidatepassword4testing!", + }, + ) + + assert reset_response_old.status_code == 422 + + +def 
test_post_reset_password_expired(flask_app): + """ + Given a Flask application configured for testing + WHEN the '/auth/password/reset-password' endpoint is requested (POST) + THEN check that the response is valid and the password is changed + """ + _test_client = flask_app.test_client() + + set_now( + datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(minutes=6) + ) + forgot_response = _test_client.post( + "/auth/forgot-password", + json={ + "email_address": "test@fairhub.io", + }, + ) + set_now(None) + + assert forgot_response.status_code == 200 + + token = forgot_response.headers["X-Token"] + + assert token is not None + + reset_response = _test_client.post( + "/auth/reset-password", + json={ + "token": token, + "confirm_password": "Updatedpassword4testing!", + "new_password": "Updatedpassword4testing!", + }, + ) + assert reset_response.status_code == 401 + + def test_post_logout(clients): """ Given a Flask application configured for testing From e33a77d0eb84c580b524df92b3fe030598332170 Mon Sep 17 00:00:00 2001 From: Aidan <62059163+Aydawka@users.noreply.github.com> Date: Wed, 18 Jun 2025 08:24:05 -0700 Subject: [PATCH 490/505] fix: uploaded files (#66) * fix: uploaded files * fix: uploaded files * fix: comment out recursion api * fix: error handling * fix: error handling * fix: error handling * style: format * feat: add azure storage datalake * style: format * fix: remove package-mode * fix: restore azure storage datalake * fix: study testing * fix: rename container name --- apis/dataset.py | 3 +- apis/file.py | 228 ++++++++++++++++++++++++++++++---------------- apis/study.py | 16 +++- config.py | 4 +- model/__init__.py | 4 + model/file.py | 14 +++ model/folder.py | 23 +++++ poetry.lock | 29 +++++- pyproject.toml | 2 +- 9 files changed, 231 insertions(+), 92 deletions(-) create mode 100644 model/file.py create mode 100644 model/folder.py diff --git a/apis/dataset.py b/apis/dataset.py index 5524583c..b2b41016 100644 --- a/apis/dataset.py +++ 
b/apis/dataset.py @@ -31,8 +31,7 @@ "id": fields.String(required=True), "updated_on": fields.String(required=True), "created_at": fields.String(required=True), - "dataset_versions": fields.Nested(dataset_versions_model, required=True), - "latest_version": fields.String(required=True), + "latest_version": fields.Boolean(required=True), "title": fields.String(required=True), "description": fields.String(required=True), }, diff --git a/apis/file.py b/apis/file.py index c2ad8d98..e31f3b14 100644 --- a/apis/file.py +++ b/apis/file.py @@ -2,42 +2,130 @@ import importlib import os -import uuid -from datetime import datetime, timezone -from urllib.parse import quote +import typing -import requests +from azure.storage.filedatalake import FileSystemClient from flask_restx import Namespace, Resource, reqparse +import model + api = Namespace("File", description="File operations", path="/") +class FileException(Exception): + pass + + +@api.errorhandler(FileException) +def handle_file_exception(error): + return {"message": str(error)}, 404 + + +# @api.route("/study//files1") +# class Files(Resource): +# """Files for a study""" +# +# parser = reqparse.RequestParser() +# parser.add_argument("path", type=str, required=False, location="args") +# @api.doc(description="Return a list of all files for a study") +# @api.param("path", "The folder path on the file system") +# @api.response(200, "Success") +# @api.response(400, "Validation Error") +# def get(self, study_id): # pylint: disable=unused-argument +# """Return a list of all files for a study""" +# # with the same name as the study id. 
+# +# # Determine the appropriate configuration module based on the testing context +# if os.environ.get("FLASK_ENV") == "testing": +# config_module_name = "pytest_config" +# else: +# config_module_name = "config" +# +# config_module = importlib.import_module(config_module_name) +# if os.environ.get("FLASK_ENV") == "testing": +# # If testing, use the 'TestConfig' class for accessing 'secret' +# config = config_module.TestConfig +# else: +# # If not testing, directly use the 'config' module +# config = config_module +# if not config.AZURE_STORAGE_CONNECTION_STRING and not config.AZURE_CONTAINER: +# return "azure connection string is missing", 404 +# def get_file_tree(): +# container = config.CONTAINER +# file_system_client = FileSystemClient.from_connection_string( +# config.AZURE_STORAGE_CONNECTION_STRING, +# file_system_name=container, +# ) +# source: str = f"AI-READI/test-files/{study_id}" +# return recurse_file_tree(file_system_client, source) +# +# def recurse_file_tree(file_system_client: FileSystemClient, source: str): +# source_client = file_system_client.get_directory_client(source) +# if not source_client.exists(): +# raise FileException("source directory does not exist!") +# props = source_client.get_directory_properties() +# updated_on = props['last_modified'] +# size = 0 +# path_name = os.path.basename(source) +# return model.FolderStructure( +# path_name, +# size, +# updated_on, +# True, +# [ +# ( +# recurse_file_tree(file_system_client, child_path.name) +# if child_path.is_directory +# else model.FileStructure( +# os.path.basename(child_path.name), +# child_path.content_length, +# child_path.last_modified, +# False +# ) +# ) +# for child_path in file_system_client.get_paths(source, recursive=False) +# ], +# ) +# return get_file_tree().to_dict(), 200 + + @api.route("/study//files") class Files(Resource): """Files for a study""" parser = reqparse.RequestParser() - parser.add_argument("path", type=str, required=False, location="args") - - 
@api.doc(description="Return a list of all files for a study") - @api.param("path", "The folder path on the file system") + parser.add_argument( + "path", + type=str, + required=False, + location="args", + default="", + help="The folder path to list. Defaults to the study root.", + ) + + @api.doc( + description="Return a flat list of files and folders for a given path within a study." + ) + @api.param("path", "The folder path on the file system to explore.") @api.response(200, "Success") - @api.response(400, "Validation Error") + @api.response(400, "Validation Error or Invalid Path") + @api.response(404, "Path not found") def get(self, study_id): # pylint: disable=unused-argument - """Return a list of all files for a study""" + """Returns a flat list of files and folders for a given path""" + study = model.Study.query.get(study_id) + if not study: + return "Study not found", 404 - # todo: anticipating that each study will have a folder in the storage account - # with the same name as the study id. 
+ args = self.parser.parse_args() + relative_path = args.get("path", "") + relative_path = relative_path.lstrip("/\\") - # Determine the appropriate configuration module - # based on the testing context if os.environ.get("FLASK_ENV") == "testing": config_module_name = "pytest_config" else: config_module_name = "config" config_module = importlib.import_module(config_module_name) - if os.environ.get("FLASK_ENV") == "testing": # If testing, use the 'TestConfig' class for accessing 'secret' config = config_module.TestConfig @@ -45,69 +133,51 @@ def get(self, study_id): # pylint: disable=unused-argument # If not testing, directly use the 'config' module config = config_module - storage_account_name = config.FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME - storage_account_sas_token = config.FAIRHUB_AZURE_READ_SAS_TOKEN - request_time = datetime.now(timezone.utc).strftime("%a, %d %b %Y %H:%M:%S GMT") - - container = "pooled-data-pilot" # todo: this should be the study id - - query_params = ( - f"recursive=false&resource=filesystem&{storage_account_sas_token}" + if not config.AZURE_STORAGE_CONNECTION_STRING and not config.AZURE_CONTAINER: + return "azure connection string is missing", 404 + # --- Path Sanitization --- + base_dir = os.path.normpath(f"AI-READI/test-files/{study_id}") + full_path = os.path.normpath(os.path.join(base_dir, relative_path)) + if os.path.commonpath([base_dir, full_path]) != base_dir: + return {"message": "Access denied: Invalid path provided."}, 400 + source_path = full_path.replace("\\", "/") + + # --- Azure Client and Directory Listing --- + file_system_client = FileSystemClient.from_connection_string( + config.AZURE_STORAGE_CONNECTION_STRING, + file_system_name=config.AZURE_CONTAINER, ) - request_args = self.parser.parse_args() - - # subdirectory traversal - if prefix_path := request_args["path"]: - print(prefix_path) - query_path = quote(prefix_path.encode("utf-8")) - query_params = f"directory={query_path}&{query_params}" - - url = 
f"https://{storage_account_name}.dfs.core.windows.net/{container}?{query_params}" # noqa: E501 # pylint: disable=line-too-long - - print(url) - - api_version = "2023-08-03" - headers = { - "x-ms-date": request_time, - "x-ms-version": api_version, - } - - try: - response = requests.get( - url, - headers=headers, - timeout=30, - ) - - response_json = response.json() - - print(response_json) - - paths = [] - - for file in response_json["paths"]: - data = { - "id": str(uuid.uuid4()), - "content_length": file["contentLength"], - # "created_at": file["creationTime"], - "name": file["name"], - "is_directory": bool("isDirectory" in file and file["isDirectory"]), - "last_modified": file["lastModified"], - } - - # convert lastModified to unix timestamp - if "lastModified" in file: - date_string = file["lastModified"] - date_object = datetime.strptime( - date_string, "%a, %d %b %Y %H:%M:%S %Z" - ) - - data["updated_on"] = int(date_object.timestamp()) - - paths.append(data) - - return paths - except requests.exceptions.RequestException as e: - print(f"An error occurred: {e}") - return "Something went wrong with the request", 500 + directory_client = file_system_client.get_directory_client(source_path) + + # Check for existence and raise exception as requested + if not directory_client.exists(): + raise FileException(f"Source directory does not exist: {source_path}") + + # The response is a simple list of items in the directory + directory_contents = [] + + for child_path in file_system_client.get_paths( + path=source_path, recursive=False + ): + item: typing.Union[model.FolderStructure, model.FileStructure] + + if child_path.is_directory: + item = model.FolderStructure( + name=os.path.basename(child_path.name), + content_length=0, + updated_on=child_path.last_modified, + is_directory=True, + files=[], + ) + else: + item = model.FileStructure( + name=os.path.basename(child_path.name), + content_length=child_path.content_length, + updated_on=child_path.last_modified, + 
is_directory=False, + ) + + directory_contents.append(item.to_dict()) + + return directory_contents, 200 diff --git a/apis/study.py b/apis/study.py index 70313a0c..12c88d59 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,12 +1,12 @@ """APIs for study operations""" "" from typing import Any, Union - +import os +from azure.storage.filedatalake import FileSystemClient from flask import Response, g, request from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate - +import config import model - from .authentication import is_granted api = Namespace("Study", description="Study operations", path="/") @@ -70,14 +70,12 @@ def post(self): "image": {"type": "string"}, }, } - try: validate(request.json, schema) except ValidationError as e: return e.message, 400 data: Union[Any, dict] = request.json - add_study = model.Study.from_data(data) model.db.session.add(add_study) @@ -88,6 +86,14 @@ def post(self): model.db.session.add(study_contributor) model.db.session.commit() + if os.environ.get("FLASK_ENV") != "testing": + container = config.AZURE_CONTAINER + + file_system_client = FileSystemClient.from_connection_string( + config.AZURE_STORAGE_CONNECTION_STRING, + file_system_name=container, + ) + file_system_client.create_directory(f"AI-READI/test-files/{study_id}") return study_.to_dict(), 201 diff --git a/config.py b/config.py index 22e48fdf..9652f7ef 100644 --- a/config.py +++ b/config.py @@ -21,9 +21,11 @@ def get_env(key): FAIRHUB_SECRET = get_env("FAIRHUB_SECRET") FAIRHUB_AZURE_READ_SAS_TOKEN = get_env("FAIRHUB_AZURE_READ_SAS_TOKEN") FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME = get_env("FAIRHUB_AZURE_STORAGE_ACCOUNT_NAME") +AZURE_CONTAINER = get_env("AZURE_CONTAINER") FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER = get_env("FAIRHUB_TEMP_BLOB_STORAGE_REDCAP_ETL_CONTAINER") FAIRHUB_GROWTHBOOK_CLIENT_KEY = 
get_env("FAIRHUB_GROWTHBOOK_CLIENT_KEY") FAIRHUB_SMTP_CONNECTION_STRING = get_env("FAIRHUB_SMTP_CONNECTION_STRING") FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS = get_env("FAIRHUB_SMTP_SENDER_EMAIL_ADDRESS") -FAIRHUB_FRONTEND_URL = get_env("FAIRHUB_FRONTEND_URL") +AZURE_STORAGE_CONNECTION_STRING = get_env("AZURE_STORAGE_CONNECTION_STRING") +FAIRHUB_FRONTEND_URL = get_env("FAIRHUB_FRONTEND_URL") \ No newline at end of file diff --git a/model/__init__.py b/model/__init__.py index ca42005b..742d6265 100644 --- a/model/__init__.py +++ b/model/__init__.py @@ -17,6 +17,8 @@ from .dataset_metadata.dataset_title import DatasetTitle from .db import db from .email_verification import EmailVerification +from .file import FileStructure +from .folder import FolderStructure from .invited_study_contributor import StudyInvitedContributor from .notification import Notification from .participant import Participant @@ -94,6 +96,8 @@ "StudyRedcap", "StudyDashboard", "StudyStatus", + "FileStructure", + "FolderStructure", "Identifiers", "Arm", "StudyInvitedContributor", diff --git a/model/file.py b/model/file.py new file mode 100644 index 00000000..6b82f2f2 --- /dev/null +++ b/model/file.py @@ -0,0 +1,14 @@ +class FileStructure: # type: ignore + def __init__(self, name, content_length, updated_on, is_directory): + self.name = name + self.size = content_length + self.updated_on = updated_on + self.is_directory = is_directory + + def to_dict(self): + return { + "name": self.name, + "content_length": self.size, + "updated_on": self.updated_on.isoformat() if self.updated_on else None, + "is_directory": self.is_directory, + } diff --git a/model/folder.py b/model/folder.py new file mode 100644 index 00000000..91423a93 --- /dev/null +++ b/model/folder.py @@ -0,0 +1,23 @@ +import typing + +from .file import FileStructure + + +class FolderStructure: # type: ignore + files: typing.List[typing.Union["FileStructure", "FolderStructure"]] + + def __init__(self, name, content_length, updated_on, is_directory, 
files): + self.name = name + self.content_length = content_length + self.updated_on = updated_on + self.is_directory = is_directory + self.files = files + + def to_dict(self): + return { + "name": self.name, + "content_length": self.content_length, + "updated_on": self.updated_on.isoformat() if self.updated_on else None, + "is_directory": self.is_directory, + "files": [i.to_dict() for i in self.files], + } diff --git a/poetry.lock b/poetry.lock index 04113f55..e4dc40bf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -313,14 +313,14 @@ azure-core = ">=1.31.0" [[package]] name = "azure-storage-blob" -version = "12.24.1" +version = "12.25.1" description = "Microsoft Azure Blob Storage Client Library for Python" category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "azure_storage_blob-12.24.1-py3-none-any.whl", hash = "sha256:77fb823fdbac7f3c11f7d86a5892e2f85e161e8440a7489babe2195bf248f09e"}, - {file = "azure_storage_blob-12.24.1.tar.gz", hash = "sha256:052b2a1ea41725ba12e2f4f17be85a54df1129e13ea0321f5a2fcc851cbf47d4"}, + {file = "azure_storage_blob-12.25.1-py3-none-any.whl", hash = "sha256:1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167"}, + {file = "azure_storage_blob-12.25.1.tar.gz", hash = "sha256:4f294ddc9bc47909ac66b8934bd26b50d2000278b10ad82cc109764fdc6e0e3b"}, ] [package.dependencies] @@ -332,6 +332,27 @@ typing-extensions = ">=4.6.0" [package.extras] aio = ["azure-core[aio] (>=1.30.0)"] +[[package]] +name = "azure-storage-file-datalake" +version = "12.20.0" +description = "Microsoft Azure File DataLake Storage Client Library for Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_storage_file_datalake-12.20.0-py3-none-any.whl", hash = "sha256:d32af627ccf7f8523ac4520a4004291b8ca340d7c607545366e1b08822afa01b"}, + {file = "azure_storage_file_datalake-12.20.0.tar.gz", hash = "sha256:8d6932b75470de935998755d980a3296412d8ff2a11631c1d99267ae65f11f03"}, +] + 
+[package.dependencies] +azure-core = ">=1.30.0" +azure-storage-blob = ">=12.25.1" +isodate = ">=0.6.1" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["azure-core[aio] (>=1.30.0)"] + [[package]] name = "babel" version = "2.17.0" @@ -4484,4 +4505,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "6e0584b39354841ea870ef44b3b8c27ffde8cb292d8a1dad071340ab5625682b" +content-hash = "cf4657c4a05782f8af6e317d9a2c13b8a58ddfb461dbcd85b15c7b202f9c52a7" diff --git a/pyproject.toml b/pyproject.toml index c0e2d658..f4618493 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,6 +83,7 @@ numpy = "^1.26.4" pycap = "^2.6.0" azure-storage-blob = "^12.19.1" azure-communication-email = "^1.0.0" +azure-storage-file-datalake = "^12.20.0" [tool.poetry.group.dev.dependencies] @@ -173,7 +174,6 @@ quiet = true profile = "black" [tool.mypy] -package-mode = false ignore_missing_imports = true no_implicit_optional = true check_untyped_defs = true From f988053a151b63195f50acf0b4d37e57a2a72641 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 1 Jul 2025 22:58:26 -0700 Subject: [PATCH 491/505] fix: creating study check --- apis/study.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/apis/study.py b/apis/study.py index 12c88d59..498d91b1 100644 --- a/apis/study.py +++ b/apis/study.py @@ -88,12 +88,12 @@ def post(self): model.db.session.commit() if os.environ.get("FLASK_ENV") != "testing": container = config.AZURE_CONTAINER - - file_system_client = FileSystemClient.from_connection_string( - config.AZURE_STORAGE_CONNECTION_STRING, - file_system_name=container, - ) - file_system_client.create_directory(f"AI-READI/test-files/{study_id}") + if config.AZURE_STORAGE_CONNECTION_STRING and config.AZURE_CONTAINER: + file_system_client = FileSystemClient.from_connection_string( + config.AZURE_STORAGE_CONNECTION_STRING, + file_system_name=container, + ) + 
file_system_client.create_directory(f"AI-READI/test-files/{study_id}") return study_.to_dict(), 201 From 2449c09ecdec565ecfde398500d1ed8209474482 Mon Sep 17 00:00:00 2001 From: Aidan <62059163+Aydawka@users.noreply.github.com> Date: Wed, 9 Jul 2025 14:30:30 -0700 Subject: [PATCH 492/505] =?UTF-8?q?feat:=20=E2=9C=A8=20add=20clinical=20tr?= =?UTF-8?q?ials=20integration=20(#67)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add integration to models * feat: add integration for intervention * wip: integrations * fix: update models * fix: creating study check * style: format * style: format * fix: study add * fix: minor testing * style: format * style: format * fix: error handling * fix: comments * fix: minor issue --- apis/study.py | 118 +++++++++++++++-- model/study.py | 121 ++++++++++++++++-- model/study_metadata/study_arm.py | 10 +- model/study_metadata/study_central_contact.py | 10 +- model/study_metadata/study_collaborators.py | 10 +- model/study_metadata/study_conditions.py | 11 +- model/study_metadata/study_description.py | 10 +- model/study_metadata/study_design.py | 46 ++++++- model/study_metadata/study_eligibility.py | 17 ++- model/study_metadata/study_identification.py | 9 +- model/study_metadata/study_intervention.py | 10 +- model/study_metadata/study_keywords.py | 11 +- model/study_metadata/study_location.py | 12 +- .../study_metadata/study_overall_official.py | 22 +++- model/study_metadata/study_oversight.py | 19 ++- model/study_metadata/study_sponsors.py | 34 ++++- model/study_metadata/study_status.py | 44 ++++++- tests/functional/test_020_study_api.py | 19 +-- 18 files changed, 441 insertions(+), 92 deletions(-) diff --git a/apis/study.py b/apis/study.py index 498d91b1..e04fe795 100644 --- a/apis/study.py +++ b/apis/study.py @@ -1,12 +1,17 @@ """APIs for study operations""" "" -from typing import Any, Union import os +import re +from typing import Any, Union + +import requests from 
azure.storage.filedatalake import FileSystemClient from flask import Response, g, request from flask_restx import Namespace, Resource, fields, reqparse from jsonschema import ValidationError, validate + import config import model + from .authentication import is_granted api = Namespace("Study", description="Study operations", path="/") @@ -16,7 +21,6 @@ "Study", { "title": fields.String(required=True, default=""), - "image": fields.String(required=True, default=""), }, ) @@ -53,7 +57,7 @@ def get(self): return [s.to_dict() for s in studies], 200 - @api.expect(study_model) + # @api.expect(study_model) @api.response(201, "Success") @api.response(400, "Validation Error") def post(self): @@ -68,15 +72,19 @@ def post(self): "title": {"type": "string", "minLength": 1, "maxLength": 300}, "short_description": {"type": "string", "maxLength": 300}, "image": {"type": "string"}, + "clinical_id": {"type": ["string", "null"]}, }, } + + data: Union[Any, dict] = request.json + add_study = model.Study.from_data(data) + identifier = data.get("clinical_id") + try: - validate(request.json, schema) + validate(instance=data, schema=schema) except ValidationError as e: return e.message, 400 - data: Union[Any, dict] = request.json - add_study = model.Study.from_data(data) model.db.session.add(add_study) study_id = add_study.id @@ -85,8 +93,8 @@ def post(self): study_contributor = model.StudyContributor.from_data(study_, g.user, "owner") model.db.session.add(study_contributor) - model.db.session.commit() if os.environ.get("FLASK_ENV") != "testing": + # TODO finish study testing integration container = config.AZURE_CONTAINER if config.AZURE_STORAGE_CONNECTION_STRING and config.AZURE_CONTAINER: file_system_client = FileSystemClient.from_connection_string( @@ -94,6 +102,45 @@ def post(self): file_system_name=container, ) file_system_client.create_directory(f"AI-READI/test-files/{study_id}") + try: + if isinstance(identifier, str) and re.match(r"^NCT\d{8}$", identifier.strip()): + + url = 
f"https://classic.clinicaltrials.gov/api/v2/studies/{identifier}" + # AI-READI id-NCT06002048 + + response = requests.get(url, timeout=10) + if response.status_code == 404: + return { + "error": "No clinical study was found with the provided identifier", + "status_code": 404, + "message": f"No study found for identifier '{identifier}'." + }, 404 + + if response.status_code != 200: + return { + "error": "Failed to fetch clinical trial data", + "status_code": response.status_code, + "message": f"ClinicalTrials.gov returned status {response.status_code}." + }, response.status_code + + clinical_data = response.json() + study_.update_identification_id(clinical_data["protocolSection"]) + study_.import_from_clinical_data( + clinical_data["protocolSection"] + ) + except requests.exceptions.RequestException as e: + return { + "error": "Failed to connect to ClinicalTrials.gov API", + "status_code": 503, + "message": str(e) + }, 503 + except Exception as e: + return { + "error": "Unexpected server error", + "status_code": 500, + "message": str(e) + }, 500 + model.db.session.commit() return study_.to_dict(), 201 @@ -121,12 +168,13 @@ def put(self, study_id: int): # Schema validation schema = { "type": "object", - "required": ["title", "image", "short_description"], + "required": ["title", "short_description", "is_overwrite"], "additionalProperties": False, "properties": { "title": {"type": "string", "minLength": 1}, - "image": {"type": "string", "minLength": 1}, "short_description": {"type": "string", "maxLength": 300}, + "is_overwrite": {"type": "boolean"}, + "clinical_id": {"type": ["string", "null"]}, }, } @@ -136,11 +184,59 @@ def put(self, study_id: int): return e.message, 400 update_study = model.Study.query.get(study_id) - + data: Union[Any, dict] = request.json if not is_granted("update_study", update_study): return "Access denied, you can not modify", 403 - update_study.update(request.json) + identifier = data["clinical_id"].strip() + is_overwrite = 
data["is_overwrite"] + + update_study.update(data) + + if identifier: + try: + if not identifier or not isinstance(identifier, str): + raise ValueError("Identifier must be a non-empty string.") + + if not re.match(r"^NCT\d{8}$", identifier): + raise ValueError("Identifier must be in the format 'NCT########'.") + + url = f"https://classic.clinicaltrials.gov/api/v2/studies/{identifier}" + + response = requests.get(url, timeout=10) + if response.status_code == 404: + return { + "error": "No clinical study was found with the provided identifier", + "status_code": 404, + "message": f"No study found for identifier '{identifier}'." + }, 404 + + if response.status_code != 200: + return { + "error": "Failed to fetch clinical trial data", + "status_code": response.status_code, + "message": f"ClinicalTrials.gov returned status {response.status_code}." + }, response.status_code + + clinical_data = response.json() + update_study.update_identification_id(clinical_data["protocolSection"]) + if is_overwrite: + update_study.import_from_clinical_data( + clinical_data["protocolSection"] + ) + except requests.exceptions.RequestException as e: + return { + "error": "Failed to connect to ClinicalTrials.gov API", + "status_code": 503, + "message": str(e) + }, 503 + except Exception as e: + return { + "error": "Unexpected server error", + "status_code": 500, + "message": str(e) + }, 500 + model.db.session.commit() return update_study.to_dict(), 200 diff --git a/model/study.py b/model/study.py index 4846685d..95f36b8c 100644 --- a/model/study.py +++ b/model/study.py @@ -1,5 +1,7 @@ import datetime +import re import uuid +from typing import cast from flask import g @@ -25,7 +27,6 @@ def __init__(self): self.study_design = model.StudyDesign(self) self.study_eligibility = model.StudyEligibility(self) self.study_description = model.StudyDescription(self) - self.study_identification.append(model.StudyIdentification(self, False)) self.study_other = model.StudyOther(self) self.study_oversight = 
model.StudyOversight(self) @@ -242,22 +243,118 @@ def update(self, data: dict): """Updates the study from a dictionary""" if not data["title"]: raise exception.ValidationException("title is required") - if not data["image"]: - raise exception.ValidationException("image is required") self.title = data["title"] - self.image = data["image"] self.short_description = data["short_description"] + if "image" in data and data["image"]: + self.image = data["image"] self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() - def validate(self): - """Validates the study""" - violations: list = [] - # if self.description.trim() == "": - # violations.push("A description is required") - # if self.keywords.length < 1: - # violations.push("At least one keyword must be specified") - return violations + def update_identification_id(self, data): + clinical_id = None + identifiers = [ + i + for i in cast(list, self.study_identification) + if re.match(r"^NCT\d{8}$", i.identifier) + ] + if not identifiers: + clinical_id = model.StudyIdentification(self, False) + self.study_identification.append(clinical_id) + model.db.session.add(clinical_id) + else: + clinical_id = identifiers[0] + + if clinical_id is not None: + clinical_id.updating_from_integration(data) + + def import_from_clinical_data(self, data): + """Updates the study from a dictionary""" + + self.study_description.updating_from_integration(data) + self.study_status.updating_from_integration(data) + self.study_eligibility.updating_from_integration(data) + self.study_sponsors.updating_from_integration(data) + self.study_design.updating_from_integration(data) + self.study_oversight.updating_from_integration(data) + self.title = data.get("identificationModule", {}).get("officialTitle", "") + interventions_data = data.get("armsInterventionsModule", {}).get( + "interventions", [] + ) + # Loop through an array and delete each object + for intervention in cast(list, self.study_intervention): + 
model.db.session.delete(intervention) + + for intervention_dict in interventions_data: + # Make the new intervention + intervention = model.StudyIntervention(self) + # Put data from dict into it + intervention.updating_from_integration(intervention_dict) + # Add to a database + self.study_intervention.append(intervention) + + keywords_data = data.get("conditionsModule", {}).get("keywords", []) + for k in cast(list, self.study_keywords): + model.db.session.delete(k) + for k_dict in keywords_data: + keywords = model.StudyKeywords(self) + keywords.updating_from_integration(k_dict) + self.study_keywords.append(keywords) + + conditions_data = data.get("conditionsModule", {}).get("conditions", []) + for c in cast(list, self.study_conditions): + model.db.session.delete(c) + for conditions_dict in conditions_data: + conditions = model.StudyConditions(self) + conditions.updating_from_integration(conditions_dict) + self.study_conditions.append(conditions) + + collaborators_data = data.get("sponsorCollaboratorsModule", {}).get( + "collaborators", [] + ) + # Loop through an array and delete each object + for collaborator in cast(list, self.study_collaborators): + model.db.session.delete(collaborator) + + for collaborator_dict in collaborators_data: + collaborator = model.StudyCollaborators(self) + collaborator.updating_from_integration(collaborator_dict) + self.study_collaborators.append(collaborator) + + arms_data = data.get("armsInterventionsModule", {}).get("armGroups", []) + # Loop through an array and delete each object + for arm in cast(list, self.study_arm): + model.db.session.delete(arm) + + for arm_dict in arms_data: + arm = model.StudyArm(self) + arm.updating_from_integration(arm_dict) + self.study_arm.append(arm) + + overall_official_data = data.get("contactsLocationsModule", {}).get( + "overallOfficials", [] + ) + # Loop through an array and delete each object + for oo in cast(list, self.study_overall_official): + model.db.session.delete(oo) + + for oo_dict in 
overall_official_data: + o_o = model.StudyOverallOfficial(self) + # Put data from dict into it + o_o.updating_from_integration(oo_dict) + # Add to a database + self.study_overall_official.append(o_o) + + location_data = data.get("contactsLocationsModule", {}).get("locations", []) + # Loop through an array and delete each object + for location in cast(list, self.study_location): + model.db.session.delete(location) + + for location_dict in location_data: + location = model.StudyLocation(self) + # Put data from dict into it + location.updating_from_integration(location_dict) + # Add to a database + self.study_location.append(location) def touch(self): self.updated_on = datetime.datetime.now(datetime.timezone.utc).timestamp() diff --git a/model/study_metadata/study_arm.py b/model/study_metadata/study_arm.py index db4609cf..b0f9208a 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -66,7 +66,9 @@ def update(self, data: dict): self.intervention_list = data["intervention_list"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyArm from a dictionary""" + self.label = data.get("label", "") + self.description = data.get("description", "") + self.type = "" + self.intervention_list = [] diff --git a/model/study_metadata/study_central_contact.py b/model/study_metadata/study_central_contact.py index 9822ecc0..440cbc22 100644 --- a/model/study_metadata/study_central_contact.py +++ b/model/study_metadata/study_central_contact.py @@ -97,7 +97,9 @@ def update(self, data: dict): self.email_address = data["email_address"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyCentralContact from a dictionary""" + self.name = data.get("name", "") + 
self.identifier = "" + self.scheme = "" + self.scheme_uri = "" diff --git a/model/study_metadata/study_collaborators.py b/model/study_metadata/study_collaborators.py index ca02af5d..fabec477 100644 --- a/model/study_metadata/study_collaborators.py +++ b/model/study_metadata/study_collaborators.py @@ -64,7 +64,9 @@ def update(self, data: dict): self.scheme_uri = data["identifier_scheme_uri"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyCollaborators from a dictionary""" + self.name = data.get("name", "") + self.identifier = "" + self.scheme = "" + self.scheme_uri = "" diff --git a/model/study_metadata/study_conditions.py b/model/study_metadata/study_conditions.py index 6d0bf83b..30bbb0ac 100644 --- a/model/study_metadata/study_conditions.py +++ b/model/study_metadata/study_conditions.py @@ -67,7 +67,10 @@ def update(self, data: dict): self.condition_uri = data["condition_uri"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyConditions from a dictionary""" + self.name = data + self.classification_code = "" + self.scheme = "" + self.scheme_uri = "" + self.condition_uri = "" diff --git a/model/study_metadata/study_description.py b/model/study_metadata/study_description.py index f5115882..2cc2c041 100644 --- a/model/study_metadata/study_description.py +++ b/model/study_metadata/study_description.py @@ -50,7 +50,9 @@ def update(self, data: dict): self.detailed_description = data["detailed_description"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyDescription from a dictionary""" + self.brief_summary = data.get("descriptionModule", 
{}).get("briefSummary", "") + self.detailed_description = data.get("descriptionModule", {}).get( + "detailedDescription", "" + ) diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 40567ca0..8f8678f6 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -115,7 +115,45 @@ def update(self, data: dict): self.is_patient_registry = data["is_patient_registry"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyDesign from a dictionary""" + self.study_type = data.get("designModule", {}).get("studyType", "").capitalize() + self.design_observational_model_list = data.get("designModule", {}).get( + "observationalModel", "" + ) + self.design_time_perspective_list = data.get("designModule", {}).get( + "timePerspective", "" + ) + self.phase_list = data.get("designModule", {}).get("phases", "") + self.design_allocation = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("allocation", "") + .capitalize() + ) + self.design_primary_purpose = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("primaryPurpose", "") + .capitalize() + ) + self.design_intervention_model = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("interventionModel", "") + .capitalize() + ) + val = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("maskingInfo", {}) + .get("masking", "") + ) + self.design_masking = "None (Open Label)" if val == "NONE" else val + + self.enrollment_count = ( + data.get("designModule", {}).get("enrollmentInfo", {}).get("count", "") + ) + self.enrollment_type = ( + data.get("designModule", {}).get("enrollmentInfo", {}).get("type", "") + ) diff --git a/model/study_metadata/study_eligibility.py b/model/study_metadata/study_eligibility.py index 8c6ee872..49bee3b3 100644 --- 
a/model/study_metadata/study_eligibility.py +++ b/model/study_metadata/study_eligibility.py @@ -99,7 +99,16 @@ def update(self, data: dict): self.sampling_method = data["sampling_method"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyEligibility from a dictionary""" + eligibility = data.get("eligibilityModule", {}) + self.sex = eligibility.get("sex", "").capitalize() + val = eligibility.get("healthyVolunteers", "false") + self.healthy_volunteers = "Yes" if str(val).lower() == "true" else "No" + self.study_population = eligibility.get("studyPopulation", "") + raw = eligibility.get("samplingMethod", "") + self.sampling_method = raw.replace("_", "-", 1).replace("_", " ").title() + min_age = eligibility.get("minimumAge", "").split() + max_age = eligibility.get("maximumAge", "").split() + self.minimum_age_value, self.minimum_age_unit = (min_age + ["", ""])[:2] + self.maximum_age_value, self.maximum_age_unit = (max_age + ["", ""])[:2] diff --git a/model/study_metadata/study_identification.py b/model/study_metadata/study_identification.py index 299f20bd..33d7f677 100644 --- a/model/study_metadata/study_identification.py +++ b/model/study_metadata/study_identification.py @@ -65,7 +65,8 @@ def update(self, data: dict): self.identifier_link = data["identifier_link"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyIdentification from a dictionary""" + self.identifier_type = "Other Identifier" + self.identifier_domain = "clinicaltrials.gov" + self.identifier = data.get("identificationModule", {}).get("nctId", "").strip() diff --git a/model/study_metadata/study_intervention.py b/model/study_metadata/study_intervention.py index 89ff4b90..373f7c7e 100644 --- 
a/model/study_metadata/study_intervention.py +++ b/model/study_metadata/study_intervention.py @@ -67,7 +67,9 @@ def update(self, data: dict): self.other_name_list = data["other_name_list"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyIntervention from a dictionary""" + self.name = data.get("name", "") + self.type = data.get("type", "").replace("_", " ").title() + self.description = data.get("description", "") + self.other_name_list = "" diff --git a/model/study_metadata/study_keywords.py b/model/study_metadata/study_keywords.py index 5ff213a9..698a4924 100644 --- a/model/study_metadata/study_keywords.py +++ b/model/study_metadata/study_keywords.py @@ -67,7 +67,10 @@ def update(self, data: dict): self.keyword_uri = data["keyword_uri"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyKeywords from a dictionary""" + self.name = data + self.classification_code = "" + self.scheme = "" + self.scheme_uri = "" + self.keyword_uri = "" diff --git a/model/study_metadata/study_location.py b/model/study_metadata/study_location.py index cbfe9f91..806f5f70 100644 --- a/model/study_metadata/study_location.py +++ b/model/study_metadata/study_location.py @@ -75,7 +75,11 @@ def update(self, data: dict): self.country = data["country"] self.study.touch() - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyLocation from a dictionary""" + self.facility = data.get("facility", "") + self.city = data.get("city", "") + self.state = data.get("state", "") + self.zip = data.get("zip", "") + self.country = data.get("country", "") + self.status = "" diff --git 
a/model/study_metadata/study_overall_official.py b/model/study_metadata/study_overall_official.py index a6f6c561..474b4c7a 100644 --- a/model/study_metadata/study_overall_official.py +++ b/model/study_metadata/study_overall_official.py @@ -96,7 +96,21 @@ def update(self, data: dict): self.role = data["role"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudyOverallOfficial from a dictionary""" + parts = data.get("name", "").strip().split() + self.first_name, self.last_name = ( + (parts[0], parts[1] if len(parts) > 1 else "") if parts else ("", "") + ) + + self.affiliation = data.get("affiliation", "") + self.role = data.get("role", "") + self.role = data.get("role", "").replace("_", " ").title() + + self.degree = "" + self.identifier = "" + self.identifier_scheme = "" + self.identifier_scheme_uri = "" + self.affiliation_identifier = "" + self.affiliation_identifier_scheme = "" + self.affiliation_identifier_scheme_uri = "" diff --git a/model/study_metadata/study_oversight.py b/model/study_metadata/study_oversight.py index 3fc0c0cb..966de04b 100644 --- a/model/study_metadata/study_oversight.py +++ b/model/study_metadata/study_oversight.py @@ -53,7 +53,18 @@ def update(self, data: dict): self.has_dmc = data["has_dmc"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a StudyOversight from a dictionary""" + self.has_dmc = ( + "Yes" if (data.get("oversightModule", {}).get("oversightHasDmc")) else "No" + ) + self.fda_regulated_drug = ( + "Yes" + if (data.get("oversightModule", {}).get("isFdaRegulatedDrug")) + else "No" + ) + self.fda_regulated_device = ( + "Yes" + if (data.get("oversightModule", {}).get("isFdaRegulatedDevice")) + else "No" + ) diff --git a/model/study_metadata/study_sponsors.py 
b/model/study_metadata/study_sponsors.py index 1e7832ca..26deef1e 100644 --- a/model/study_metadata/study_sponsors.py +++ b/model/study_metadata/study_sponsors.py @@ -141,7 +141,33 @@ def update(self, data: dict): "lead_sponsor_identifier_scheme_uri" ] - def validate(self): - """Validates the lead_sponsor_last_name study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """it updates a StudySponsors from a dictionary""" + self.responsible_party_type = ( + data.get("sponsorCollaboratorsModule", {}) + .get("responsibleParty", {}) + .get("type", "") + .replace("_", " ") + .title() + ) + party = data.get("sponsorCollaboratorsModule", {}).get("responsibleParty", {}) + full_name = party.get("investigatorFullName", "").split(maxsplit=1) + self.responsible_party_investigator_first_name = ( + full_name[0] if full_name else "" + ) + self.responsible_party_investigator_last_name = ( + full_name[1] if len(full_name) > 1 else "" + ) + self.responsible_party_investigator_affiliation_name = ( + party.get("investigatorAffiliation", "").title().capitalize() + ) + self.responsible_party_investigator_affiliation_name = ( + data.get("sponsorCollaboratorsModule", {}) + .get("responsibleParty", {}) + .get("investigatorAffiliation", "") + ) + self.lead_sponsor_name = ( + data.get("sponsorCollaboratorsModule", {}) + .get("leadSponsor", {}) + .get("name", "") + ) diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index dd1ffe78..5b444dfb 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -1,3 +1,5 @@ +from datetime import datetime + from model import Study from ..db import db @@ -68,7 +70,41 @@ def update(self, data: dict): self.completion_date_type = data["completion_date_type"] self.study.touch() - def validate(self): - """Validates the study""" - violations: list = [] - return violations + def updating_from_integration(self, data: dict): + """It updates a 
StudyStatus from a dictionary""" + status_map = { + "WITHDRAWN": "Withdrawn", + "RECRUITING": "Recruiting", + "ACTIVE_NOT_RECRUITING": "Active, not recruiting", + "NOT_YET_RECRUITING": "Not yet recruiting", + "SUSPENDED": "Suspended", + "ENROLLING_BY_INVITATION": "Enrolling by invitation", + "COMPLETED": "Completed", + "TERMINATED": "Terminated", + } + raw_status = data.get("statusModule", {}).get("overallStatus", "") + self.overall_status = status_map.get(raw_status, "") + + self.overall_status = ( + data.get("statusModule", {}) + .get("overallStatus", "") + .replace("_", " ") + .title() + ) + s_d = data.get("statusModule", {}).get("startDateStruct", {}).get("date") + self.start_date = datetime.strptime(s_d, "%Y-%m-%d") if s_d else None + + c_d = data.get("statusModule", {}).get("completionDateStruct", {}).get("date") + self.completion_date = datetime.strptime(c_d, "%Y-%m-%d") if c_d else None + self.start_date_type = ( + data.get("statusModule", {}) + .get("startDateStruct", {}) + .get("type", "") + .capitalize() + ) + self.completion_date_type = ( + data.get("statusModule", {}) + .get("completionDateStruct", {}) + .get("type", "") + .capitalize() + ) diff --git a/tests/functional/test_020_study_api.py b/tests/functional/test_020_study_api.py index f1a52c85..9dd94c85 100644 --- a/tests/functional/test_020_study_api.py +++ b/tests/functional/test_020_study_api.py @@ -18,6 +18,7 @@ def test_post_study(_logged_in_client): "title": "Study Title", "image": "https://api.dicebear.com/6.x/adventurer/svg", "short_description": "short_description", + "clinical_id": "NCT06002048", }, ) @@ -98,17 +99,16 @@ def test_update_study(clients): f"/study/{study_id}", json={ "title": "Study Title Updated", - "image": pytest.global_study_id["image"], # type: ignore "short_description": pytest.global_study_id["short_description"], # type: ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) - assert response.status_code == 200 response_data = json.loads(response.data) 
pytest.global_study_id = response_data assert response_data["title"] == "Study Title Updated" - assert response_data["image"] == pytest.global_study_id["image"] # type: ignore assert response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert response_data["id"] == pytest.global_study_id["id"] # type: ignore @@ -116,8 +116,9 @@ def test_update_study(clients): f"/study/{study_id}", json={ "title": "Admin Study Title", - "image": pytest.global_study_id["image"], # type: ignore "short_description": pytest.global_study_id["short_description"], # type: ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) @@ -127,15 +128,15 @@ def test_update_study(clients): assert admin_response_data["title"] == "Admin Study Title" assert admin_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore - assert admin_response_data["image"] == pytest.global_study_id["image"] # type: ignore assert admin_response_data["id"] == pytest.global_study_id["id"] # type: ignore editor_response = _editor_client.put( f"/study/{study_id}", json={ "title": "Editor Study Title", - "image": pytest.global_study_id["image"], # type: ignore "short_description": pytest.global_study_id["short_description"], # type: ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) @@ -144,7 +145,6 @@ def test_update_study(clients): pytest.global_study_id = editor_response_data assert editor_response_data["title"] == "Editor Study Title" - assert editor_response_data["image"] == pytest.global_study_id["image"] # type: ignore assert editor_response_data["short_description"] == pytest.global_study_id["short_description"] # type: ignore assert editor_response_data["id"] == pytest.global_study_id["id"] # type: ignore @@ -152,8 +152,9 @@ def test_update_study(clients): f"/study/{study_id}", json={ "title": "Viewer Study Title", - "image": pytest.global_study_id["image"], # type: ignore "short_description": 
pytest.global_study_id["short_description"], # type: ignore + "clinical_id": "NCT06002048", + "is_overwrite": False, }, ) @@ -224,9 +225,9 @@ def test_delete_studies_created(clients): "title": "Delete Me", "image": "https://api.dicebear.com/6.x/adventurer/svg", "short_description": "short_description", + "clinical_id": "NCT06002048", }, ) - assert response.status_code == 201 response_data = json.loads(response.data) study_id = response_data["id"] From 3186c992afc7f9de60f09268720a12c79d296d05 Mon Sep 17 00:00:00 2001 From: Aidan <62059163+Aydawka@users.noreply.github.com> Date: Tue, 15 Jul 2025 23:09:21 -0700 Subject: [PATCH 493/505] fix: clinical trials automation fix (#68) * fix: status error handling * fix: error handling --- apis/study.py | 25 +++++---- apis/study_metadata/study_status.py | 35 ++++++------ model/study.py | 79 ++++++++++------------------ model/study_metadata/study_arm.py | 6 ++- model/study_metadata/study_design.py | 32 +++++++++-- model/study_metadata/study_status.py | 26 +++++++-- 6 files changed, 112 insertions(+), 91 deletions(-) diff --git a/apis/study.py b/apis/study.py index e04fe795..4c518190 100644 --- a/apis/study.py +++ b/apis/study.py @@ -103,8 +103,9 @@ def post(self): ) file_system_client.create_directory(f"AI-READI/test-files/{study_id}") try: - if isinstance(identifier, str) and re.match(r"^NCT\d{8}$", identifier.strip()): - + if isinstance(identifier, str) and re.match( + r"^NCT\d{8}$", identifier.strip() + ): url = f"https://classic.clinicaltrials.gov/api/v2/studies/{identifier}" # AI-READI id-NCT06002048 @@ -113,32 +114,30 @@ def post(self): return { "error": "No clinical study was found with the provided identifier", "status_code": 404, - "message": f"No study found for identifier '{identifier}'." 
+ "message": f"No study found for identifier '{identifier}'.", }, 404 if response.status_code != 200: return { "error": "Failed to fetch clinical trial data", "status_code": response.status_code, - "message": f"ClinicalTrials.gov returned status {response.status_code}." + "message": f"ClinicalTrials.gov returned status {response.status_code}.", }, response.status_code clinical_data = response.json() study_.update_identification_id(clinical_data["protocolSection"]) - study_.import_from_clinical_data( - clinical_data["protocolSection"] - ) + study_.import_from_clinical_data(clinical_data["protocolSection"]) except requests.exceptions.RequestException as e: return { "error": "Failed to connect to ClinicalTrials.gov API", "status_code": 503, - "message": str(e) + "message": str(e), }, 503 except Exception as e: return { "error": "Unexpected server error", "status_code": 500, - "message": str(e) + "message": str(e), }, 500 model.db.session.commit() @@ -208,14 +207,14 @@ def put(self, study_id: int): return { "error": "No clinical study was found with the provided identifier", "status_code": 404, - "message": f"No study found for identifier '{identifier}'." + "message": f"No study found for identifier '{identifier}'.", }, 404 if response.status_code != 200: return { "error": "Failed to fetch clinical trial data", "status_code": response.status_code, - "message": f"ClinicalTrials.gov returned status {response.status_code}." 
+ "message": f"ClinicalTrials.gov returned status {response.status_code}.", }, response.status_code clinical_data = response.json() @@ -228,13 +227,13 @@ def put(self, study_id: int): return { "error": "Failed to connect to ClinicalTrials.gov API", "status_code": 503, - "message": str(e) + "message": str(e), }, 503 except Exception as e: return { "error": "Unexpected server error", "status_code": 500, - "message": str(e) + "message": str(e), }, 500 model.db.session.commit() diff --git a/apis/study_metadata/study_status.py b/apis/study_metadata/study_status.py index 41a7c51e..121240aa 100644 --- a/apis/study_metadata/study_status.py +++ b/apis/study_metadata/study_status.py @@ -3,7 +3,7 @@ import typing from flask import request -from flask_restx import Resource, fields +from flask_restx import Resource, fields, marshal from jsonschema import ValidationError, validate import model @@ -11,16 +11,15 @@ from ..authentication import is_granted -study_status = api.model( +study_status_model = api.model( "StudyStatus", { - "id": fields.String(required=True), - "overall_status": fields.String(required=True), + "overall_status": fields.String(required=False), "why_stopped": fields.String(required=True), - "start_date": fields.String(required=True), - "start_date_type": fields.String(required=True), - "completion_date": fields.String(required=True), - "completion_date_type": fields.String(required=True), + "start_date": fields.String(required=False), + "start_date_type": fields.String(required=False), + "completion_date": fields.String(required=False), + "completion_date_type": fields.String(required=False), }, ) @@ -33,7 +32,7 @@ class StudyStatusResource(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") # @api.param("id", "The study identifier") - @api.marshal_with(study_status) + @api.marshal_with(study_status_model) def get(self, study_id: int): """Get study status metadata""" study_ = model.Study.query.get(study_id) @@ -44,7 +43,6 @@ def 
get(self, study_id: int): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(study_status) def put(self, study_id: int): """Update study status metadata""" # Schema validation @@ -55,7 +53,6 @@ def put(self, study_id: int): "start_date", "start_date_type", "overall_status", - "why_stopped", "completion_date", "completion_date_type", ], @@ -95,20 +92,20 @@ def put(self, study_id: int): return e.message, 400 data: typing.Union[typing.Any, dict] = request.json - if data["overall_status"] in ["Completed", "Terminated", "Suspended"]: - if "why_stopped" not in data or not data["why_stopped"]: - return ( - f"why_stopped is required for overall_status: {data['overall_status']}", - 400, - ) - study_obj = model.Study.query.get(study_id) if not is_granted("study_metadata", study_obj): return "Access denied, you can not modify study", 403 + if data.get("overall_status") in ["Completed", "Terminated", "Suspended"]: + why_stopped = data.get("why_stopped", "") + if not why_stopped or not why_stopped.strip(): + return { + "message": f"why_stopped is required for overall_status: {data['overall_status']}" + }, 400 study = model.Study.query.get(study_id) study.study_status.update(request.json) model.db.session.commit() - return study.study_status.to_dict(), 200 + result = marshal(study.study_status.to_dict(), study_status_model) + return result, 200 diff --git a/model/study.py b/model/study.py index 95f36b8c..829a2e90 100644 --- a/model/study.py +++ b/model/study.py @@ -43,120 +43,120 @@ def __init__(self): dataset = db.relationship( "Dataset", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_contributors = db.relationship( "StudyContributor", back_populates="study", lazy="dynamic", - cascade="all, delete", + cascade="all, delete-orphan", ) participants = db.relationship( "Participant", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) invited_contributors = db.relationship( 
"StudyInvitedContributor", back_populates="study", lazy="dynamic", - cascade="all, delete", + cascade="all, delete-orphan", ) study_arm = db.relationship( "StudyArm", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_sponsors = db.relationship( "StudySponsors", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_central_contact = db.relationship( "StudyCentralContact", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_description = db.relationship( "StudyDescription", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_design = db.relationship( "StudyDesign", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_eligibility = db.relationship( "StudyEligibility", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_identification = db.relationship( "StudyIdentification", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) # NOTE: Has not been tested study_redcap = db.relationship( - "StudyRedcap", back_populates="study", cascade="all, delete" + "StudyRedcap", back_populates="study", cascade="all, delete-orphan" ) # NOTE: Has not been tested study_dashboard = db.relationship( - "StudyDashboard", back_populates="study", cascade="all, delete" + "StudyDashboard", back_populates="study", cascade="all, delete-orphan" ) study_intervention = db.relationship( "StudyIntervention", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_location = db.relationship( "StudyLocation", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_other = db.relationship( "StudyOther", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_keywords = db.relationship( 
"StudyKeywords", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_conditions = db.relationship( "StudyConditions", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_collaborators = db.relationship( "StudyCollaborators", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_oversight = db.relationship( "StudyOversight", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_overall_official = db.relationship( "StudyOverallOfficial", back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) study_status = db.relationship( "StudyStatus", uselist=False, back_populates="study", - cascade="all, delete", + cascade="all, delete-orphan", ) def to_dict(self): @@ -280,29 +280,21 @@ def import_from_clinical_data(self, data): interventions_data = data.get("armsInterventionsModule", {}).get( "interventions", [] ) - # Loop through an array and delete each object - for intervention in cast(list, self.study_intervention): - model.db.session.delete(intervention) - + self.study_intervention.clear() for intervention_dict in interventions_data: - # Make the new intervention intervention = model.StudyIntervention(self) - # Put data from dict into it intervention.updating_from_integration(intervention_dict) - # Add to a database self.study_intervention.append(intervention) keywords_data = data.get("conditionsModule", {}).get("keywords", []) - for k in cast(list, self.study_keywords): - model.db.session.delete(k) + self.study_keywords.clear() for k_dict in keywords_data: keywords = model.StudyKeywords(self) keywords.updating_from_integration(k_dict) self.study_keywords.append(keywords) conditions_data = data.get("conditionsModule", {}).get("conditions", []) - for c in cast(list, self.study_conditions): - model.db.session.delete(c) + self.study_conditions.clear() for conditions_dict in conditions_data: 
conditions = model.StudyConditions(self) conditions.updating_from_integration(conditions_dict) @@ -311,9 +303,7 @@ def import_from_clinical_data(self, data): collaborators_data = data.get("sponsorCollaboratorsModule", {}).get( "collaborators", [] ) - # Loop through an array and delete each object - for collaborator in cast(list, self.study_collaborators): - model.db.session.delete(collaborator) + self.study_collaborators.clear() for collaborator_dict in collaborators_data: collaborator = model.StudyCollaborators(self) @@ -321,9 +311,7 @@ def import_from_clinical_data(self, data): self.study_collaborators.append(collaborator) arms_data = data.get("armsInterventionsModule", {}).get("armGroups", []) - # Loop through an array and delete each object - for arm in cast(list, self.study_arm): - model.db.session.delete(arm) + self.study_arm.clear() for arm_dict in arms_data: arm = model.StudyArm(self) @@ -333,27 +321,18 @@ def import_from_clinical_data(self, data): overall_official_data = data.get("contactsLocationsModule", {}).get( "overallOfficials", [] ) - # Loop through an array and delete each object - for oo in cast(list, self.study_overall_official): - model.db.session.delete(oo) + self.study_overall_official.clear() for oo_dict in overall_official_data: o_o = model.StudyOverallOfficial(self) - # Put data from dict into it o_o.updating_from_integration(oo_dict) - # Add to a database self.study_overall_official.append(o_o) location_data = data.get("contactsLocationsModule", {}).get("locations", []) - # Loop through an array and delete each object - for location in cast(list, self.study_location): - model.db.session.delete(location) - + self.study_location.clear() for location_dict in location_data: location = model.StudyLocation(self) - # Put data from dict into it location.updating_from_integration(location_dict) - # Add to a database self.study_location.append(location) def touch(self): diff --git a/model/study_metadata/study_arm.py 
b/model/study_metadata/study_arm.py index b0f9208a..00e99028 100644 --- a/model/study_metadata/study_arm.py +++ b/model/study_metadata/study_arm.py @@ -70,5 +70,9 @@ def updating_from_integration(self, data: dict): """It updates a StudyArm from a dictionary""" self.label = data.get("label", "") self.description = data.get("description", "") - self.type = "" + # self.type = "" self.intervention_list = [] + self.type = (data.get("type") or "").replace("_", " ").title() + self.intervention_list = [ + i.replace("_", " ").title() for i in data.get("interventionNames", []) + ] diff --git a/model/study_metadata/study_design.py b/model/study_metadata/study_design.py index 8f8678f6..e1433ce2 100644 --- a/model/study_metadata/study_design.py +++ b/model/study_metadata/study_design.py @@ -124,12 +124,16 @@ def updating_from_integration(self, data: dict): self.design_time_perspective_list = data.get("designModule", {}).get( "timePerspective", "" ) - self.phase_list = data.get("designModule", {}).get("phases", "") + self.phase_list = data.get("designModule", {}).get("phases", []) + self.phase_list = [ + i.lower().replace("phase", "Phase ") for i in self.phase_list + ] self.design_allocation = ( data.get("designModule", {}) .get("designInfo", {}) .get("allocation", "") - .capitalize() + .replace("_", "-") + .title() ) self.design_primary_purpose = ( data.get("designModule", {}) @@ -149,11 +153,31 @@ def updating_from_integration(self, data: dict): .get("maskingInfo", {}) .get("masking", "") ) - self.design_masking = "None (Open Label)" if val == "NONE" else val + self.design_masking = ( + val + if val == "N/A" + else "None (Open Label)" + if val == "NONE" + else "Blinded (no details)" + if val.lower().startswith("blind") + else val.capitalize() + ) + + self.design_who_masked_list = ( + data.get("designModule", {}) + .get("designInfo", {}) + .get("maskingInfo", {}) + .get("whoMasked", []) + ) + self.design_who_masked_list = [ + i.replace("_", " ").title() for i in 
self.design_who_masked_list + ] self.enrollment_count = ( data.get("designModule", {}).get("enrollmentInfo", {}).get("count", "") ) self.enrollment_type = ( data.get("designModule", {}).get("enrollmentInfo", {}).get("type", "") - ) + ).capitalize() + if self.enrollment_type == "Estimated": + self.enrollment_type = "Anticipated" diff --git a/model/study_metadata/study_status.py b/model/study_metadata/study_status.py index 5b444dfb..45ff48c6 100644 --- a/model/study_metadata/study_status.py +++ b/model/study_metadata/study_status.py @@ -92,18 +92,36 @@ def updating_from_integration(self, data: dict): .title() ) s_d = data.get("statusModule", {}).get("startDateStruct", {}).get("date") - self.start_date = datetime.strptime(s_d, "%Y-%m-%d") if s_d else None + self.start_date = ( + datetime.strptime(s_d, "%Y-%m-%d") if s_d and len(s_d) == 10 else None + ) c_d = data.get("statusModule", {}).get("completionDateStruct", {}).get("date") - self.completion_date = datetime.strptime(c_d, "%Y-%m-%d") if c_d else None + self.completion_date = ( + datetime.strptime(c_d, "%Y-%m-%d") if c_d and len(c_d) == 10 else None + ) + self.start_date_type = ( - data.get("statusModule", {}) + "Anticipated" + if data.get("statusModule", {}) + .get("startDateStruct", {}) + .get("type", "") + .lower() + == "estimated" + else data.get("statusModule", {}) .get("startDateStruct", {}) .get("type", "") .capitalize() ) + self.completion_date_type = ( - data.get("statusModule", {}) + "Anticipated" + if data.get("statusModule", {}) + .get("completionDateStruct", {}) + .get("type", "") + .lower() + == "estimated" + else data.get("statusModule", {}) .get("completionDateStruct", {}) .get("type", "") .capitalize() From 0b82c4c20af2b62a8ac5c9e141488d31e211ee52 Mon Sep 17 00:00:00 2001 From: aydawka Date: Tue, 30 Sep 2025 13:10:50 -0700 Subject: [PATCH 494/505] fix: dataset updated date --- apis/dataset.py | 2 +- model/dataset.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/apis/dataset.py 
b/apis/dataset.py index b2b41016..68194dfd 100644 --- a/apis/dataset.py +++ b/apis/dataset.py @@ -42,7 +42,7 @@ class DatasetList(Resource): @api.response(200, "Success") @api.response(400, "Validation Error") - @api.marshal_with(dataset) + # @api.marshal_with(dataset) @api.doc("view datasets") def get(self, study_id): study = model.Study.query.get(study_id) diff --git a/model/dataset.py b/model/dataset.py index 320f2929..567dab7c 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -128,6 +128,7 @@ def to_dict(self): i.description if i.type == "Abstract" else None for i in self.dataset_description # type: ignore ][0], + "updated_on": self.updated_on } def to_dict_dataset_metadata(self): From 9de84471c13ca98b65b0788a0e10abcfab136125 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 16:31:51 -0800 Subject: [PATCH 495/505] fix: huge etl upgrades --- apis/__init__.py | 3 +- apis/dashboard.py | 213 +- model/dataset.py | 2 +- model/session.py | 1 - modules/__init__.py | 6 +- modules/etl/config/aireadi_config.py | 8877 ++++++++--------- modules/etl/transforms/module_transform.py | 125 +- .../etl/transforms/redcap_live_transform.py | 667 +- .../transforms/redcap_release_transform.py | 709 +- modules/etl/vtypes/compound.py | 2 - modules/etl/vtypes/mixed.py | 2 - modules/etl/vtypes/timeseries.py | 7 +- modules/etl/vtypes/vtype.py | 117 +- poetry.lock | 4391 ++++---- pyproject.toml | 6 +- 15 files changed, 8062 insertions(+), 7066 deletions(-) diff --git a/apis/__init__.py b/apis/__init__.py index 71482d7d..883cb1d8 100644 --- a/apis/__init__.py +++ b/apis/__init__.py @@ -27,12 +27,11 @@ from .study_metadata.study_design import api as design from .study_metadata.study_eligibility import api as eligibility from .study_metadata.study_intervention import api as intervention - from .study_metadata.study_location import api as location from .study_metadata.study_overall_official import api as overall_official from .study_metadata.study_oversight import api as 
oversight -from .study_metadata.study_team import api as sponsors from .study_metadata.study_status import api as status +from .study_metadata.study_team import api as sponsors from .user import api as user from .utils import api as utils diff --git a/apis/dashboard.py b/apis/dashboard.py index 70c60d1b..8a5b5ef8 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -211,6 +211,75 @@ ) +def execute_transform( + transformConfig: Dict[str, Any], + redcap_project_view: Dict[str, Any], + redcap_project_dashboard: Dict[str, Any], + live: bool = False, +) -> Dict[str, Any]: + # Set report_ids for ETL + report_keys = [] + for report in redcap_project_dashboard["reports"]: + for i, report_config in enumerate(transformConfig["reports"]): + if ( + len(report["report_id"]) > 0 + and report["report_key"] == report_config["key"] + ): + report_keys.append(report["report_key"]) + transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ + "report_id" + ] + + # Remove Unused Reports + transformConfig["reports"] = [ + report for report in transformConfig["reports"] if report["key"] in report_keys + ] + + # Set Post Transform Merge + index_columns, post_transform_merges = transformConfig["post_transform_merge"] + transformConfig["post_transform_merge"] = ( + index_columns, + [ + (report_key, transform_kwdargs) + for report_key, transform_kwdargs in post_transform_merges + if report_key in report_keys + ], + ) + + # Execute REDCap Data Transform + if not live: + redcap_etl_config = transformConfig + redcapTransform = RedcapReleaseTransform(redcap_etl_config).run() + else: + transformConfig["redcap_api_url"] = redcap_project_view["api_url"] + transformConfig["redcap_api_key"] = redcap_project_view["api_key"] + redcap_etl_config = transformConfig + redcapTransform = RedcapLiveTransform(redcap_etl_config).run() + + # Execute Dashboard Module Transforms + for dashboard_module in redcap_project_dashboard["modules"]: + if dashboard_module["selected"]: + mergedTransform = 
redcapTransform.merged + transform, module_etl_config = moduleTransformConfigs[ + dashboard_module["id"] + ] + moduleTransform = ModuleTransform(module_etl_config) + transformed = getattr(moduleTransform, transform)( + mergedTransform + ).transformed + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": transformed, + } + else: + dashboard_module["visualizations"] = { + "id": dashboard_module["id"], + "data": [], + } + + return redcap_project_dashboard + + @api.route("/study//dashboard") class RedcapProjectDashboards(Resource): @api.doc("Get all study dashboards") @@ -387,13 +456,6 @@ def get(self, study_id: str, dashboard_id: str): if not is_granted("view", study): return "Access denied, you can not view this dashboard", 403 - # # Retrieve Dashboard Redis Cache if Available - # cached_redcap_project_dashboard = caching.cache.get( - # f"$study_id#{study_id}$dashboard_id#{dashboard_id}" - # ) - # if cached_redcap_project_dashboard is not None: - # return cached_redcap_project_dashboard, 201 - # Get Base Transform Config for ETL - Live transformConfig = redcapLiveTransformConfig @@ -412,66 +474,14 @@ def get(self, study_id: str, dashboard_id: str): ) redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() - # Set report_ids for ETL - report_keys = [] - for report in redcap_project_dashboard["reports"]: - for i, report_config in enumerate(transformConfig["reports"]): - if ( - len(report["report_id"]) > 0 - and report["report_key"] == report_config["key"] - ): - report_keys.append(report["report_key"]) - transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ - "report_id" - ] - - # Remove Unused Reports - transformConfig["reports"] = [ - report - for report in redcapLiveTransformConfig["reports"] - if report["key"] in report_keys - ] - - # Set Post Transform Merge - index_columns, post_transform_merges = transformConfig["post_transform_merge"] - transformConfig["post_transform_merge"] = ( - index_columns, - [ - 
(report_key, transform_kwdargs) - for report_key, transform_kwdargs in post_transform_merges - if report_key in report_keys - ], - ) - - # Set REDCap API Config - transformConfig["redcap_api_url"] = redcap_project_view["api_url"] - transformConfig["redcap_api_key"] = redcap_project_view["api_key"] - # Finalize ETL Config - redcap_etl_config = transformConfig - - redcapTransform = RedcapLiveTransform(redcap_etl_config) - - # Execute Dashboard Module Transforms - for dashboard_module in redcap_project_dashboard["modules"]: - if dashboard_module["selected"]: - mergedTransform = redcapTransform.merged - transform, module_etl_config = moduleTransformConfigs[ - dashboard_module["id"] - ] - moduleTransform = ModuleTransform(module_etl_config) - transformed = getattr(moduleTransform, transform)( - mergedTransform - ).transformed - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": transformed, - } - else: - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": [], - } + transformConfig = redcapLiveTransformConfig + redcap_project_dashboard = execute_transform( + transformConfig, + redcap_project_view, + redcap_project_dashboard, + live=True, + ) # Create Dashboard Redis Cache caching.cache.set( @@ -657,6 +667,13 @@ def get(self, study_id: str): # Public Dashboard ID dashboard_id = redcap_project_dashboard["id"] + # Get REDCap Project + redcap_id = redcap_project_dashboard["redcap_id"] + redcap_project_view_query: Any = model.db.session.query(model.StudyRedcap).get( + redcap_id + ) + redcap_project_view: Dict[str, Any] = redcap_project_view_query.to_dict() + # # Retrieve Dashboard Redis Cache if Available # cached_redcap_project_dashboard = caching.cache.get( # f"$study_id#{study_id}$dashboard_id#{dashboard_id}#public" @@ -668,67 +685,15 @@ def get(self, study_id: str): # No Cache, Do ETL # - # Get Base Transform Config for ETL - Release + # Finalize ETL Config transformConfig = redcapReleaseTransformConfig - - # Set 
report_ids for ETL - report_keys = [] - for report in redcap_project_dashboard["reports"]: - for i, report_config in enumerate(transformConfig["reports"]): - if ( - len(report["report_id"]) > 0 - and report["report_key"] == report_config["key"] - ): - report_keys.append(report["report_key"]) - transformConfig["reports"][i]["kwdargs"]["report_id"] = report[ - "report_id" - ] - - # Remove Unused Reports - transformConfig["reports"] = [ - report - for report in redcapLiveTransformConfig["reports"] - if report["key"] in report_keys - ] - - # Set Post Transform Merge - index_columns, post_transform_merges = transformConfig["post_transform_merge"] - transformConfig["post_transform_merge"] = ( - index_columns, - [ - (report_key, transform_kwdargs) - for report_key, transform_kwdargs in post_transform_merges - if report_key in report_keys - ], + redcap_project_dashboard = execute_transform( + transformConfig, + redcap_project_view, + redcap_project_dashboard, + live=False, ) - # Finalize ETL Config - redcap_etl_config = transformConfig - - # Execute REDCap Release ETL - redcapTransform = RedcapReleaseTransform(redcap_etl_config) - - # Execute Dashboard Module Transforms - for dashboard_module in redcap_project_dashboard["modules"]: - if dashboard_module["selected"]: - mergedTransform = redcapTransform.merged - transform, module_etl_config = moduleTransformConfigs[ - dashboard_module["id"] - ] - moduleTransform = ModuleTransform(module_etl_config) - transformed = getattr(moduleTransform, transform)( - mergedTransform - ).transformed - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": transformed, - } - else: - dashboard_module["visualizations"] = { - "id": dashboard_module["id"], - "data": [], - } - # Create Dashboard Redis Cache caching.cache.set( f"$study_id#{study_id}$dashboard_id#{dashboard_id}#public", diff --git a/model/dataset.py b/model/dataset.py index 567dab7c..e8e255d6 100644 --- a/model/dataset.py +++ b/model/dataset.py @@ -128,7 
+128,7 @@ def to_dict(self): i.description if i.type == "Abstract" else None for i in self.dataset_description # type: ignore ][0], - "updated_on": self.updated_on + "updated_on": self.updated_on, } def to_dict_dataset_metadata(self): diff --git a/model/session.py b/model/session.py index e26ea7e2..aabe6cbb 100644 --- a/model/session.py +++ b/model/session.py @@ -1,5 +1,4 @@ from . import User - from .db import db diff --git a/modules/__init__.py b/modules/__init__.py index f46dda20..78d0903a 100644 --- a/modules/__init__.py +++ b/modules/__init__.py @@ -1,6 +1,8 @@ -from .etl import config, transforms, vtypes +from .etl import config +from .etl import transforms +from .etl import vtypes from .invitation import signin_notification from .invitation import send_access_contributors from .invitation import send_invitation_study -from .invitation import send_email_verification \ No newline at end of file +from .invitation import send_email_verification diff --git a/modules/etl/config/aireadi_config.py b/modules/etl/config/aireadi_config.py index 419fecb1..2086e269 100644 --- a/modules/etl/config/aireadi_config.py +++ b/modules/etl/config/aireadi_config.py @@ -15,105 +15,104 @@ # Utility Column Groups index_columns: List = [ - "record_id", + "record_id", ] # Data Column Groups data_columns: List = [ - "studyid", - "siteid", - "dm", - "genderid", - "scrsex", - "race", - "race2", - "ethnic", - "dvenvyn", - "dvenvstdat", - "dvenvcrcid", - "dvcgmyn", - "dvcgmstdat", - "dvcgmvrfy", - "dvamwyn", - "dvamwstdat", - "dvamwsn", - "dvrtmthd", - "dvrtnyn", - "dvrtnship", - "mhterm_dm1", - "mhterm_dm2", - "mhterm_predm", - "mh_dm_age", - "mh_a1c", - "cmtrt_a1c", - "cmtrt_insln", - "cmtrt_glcs", - "cmtrt_lfst", - "pacmpdat", + "studyid", + "siteid", + "dm", + "genderid", + "scrsex", + "race", + "race2", + "ethnic", + "dvenvyn", + "dvenvstdat", + "dvenvcrcid", + "dvcgmyn", + "dvcgmstdat", + "dvcgmvrfy", + "dvamwyn", + "dvamwstdat", + "dvamwsn", + "dvrtmthd", + "dvrtnyn", + "dvrtnship", 
+ "mhterm_dm1", + "mhterm_dm2", + "mhterm_predm", + "mh_dm_age", + "mh_a1c", + "cmtrt_a1c", + "cmtrt_insln", + "cmtrt_glcs", + "cmtrt_lfst", + "pacmpdat", ] phase_2_columns: List = [ - "race_db", - "export_group", + "race_db", + "export_group", ] computed_columns: List = [ - "phenotypes", - "treatments", - "visitweek", - "visityear", - "visitdate", + "phenotypes", + "treatments", + "visitweek", + "visityear", + "visitdate", ] # Survey Column Groups survey_columns: List = [ - "screening_survey_complete", - "study_enrollment_complete", - "recruitment_survey_complete", - "faq_survey_complete", - "recruitment_survey_management_complete", - "device_distribution_complete", - "preconsent_survey_complete", - "consent_survey_complete", - "staff_consent_attestation_survey_complete", - "demographics_survey_complete", - "health_survey_complete", - "substance_use_survey_complete", - "cesd10_survey_complete", - "paid5_dm_survey_complete", - "diabetes_survey_complete", - "dietary_survey_complete", - "ophthalmic_survey_complete", - "px_sdoh_combined_survey_complete", - "px_food_insecurity_survey_complete", - "px_neighborhood_environment_survey_complete", - "px_racial_ethnic_discrimination_survey_complete", - "decline_participation_survey_complete", - "meds_assessment_complete", - "driving_record_complete", - "physical_assessment_complete", - "bcva_complete", - "photopic_mars_complete", - "mesopic_mars_complete", - "monofilament_complete", - "moca_complete", - "ecg_complete", - "retinal_imaging_v2_complete", - "lab_results_complete", - "device_return_complete", - "specimen_management_complete", - "disposition_complete", - "data_management_complete", + "screening_survey_complete", + "study_enrollment_complete", + "recruitment_survey_complete", + "faq_survey_complete", + "recruitment_survey_management_complete", + "device_distribution_complete", + "preconsent_survey_complete", + "consent_survey_complete", + "staff_consent_attestation_survey_complete", + 
"demographics_survey_complete", + "health_survey_complete", + "substance_use_survey_complete", + "cesd10_survey_complete", + "paid5_dm_survey_complete", + "diabetes_survey_complete", + "dietary_survey_complete", + "ophthalmic_survey_complete", + "px_sdoh_combined_survey_complete", + "px_food_insecurity_survey_complete", + "px_neighborhood_environment_survey_complete", + "px_racial_ethnic_discrimination_survey_complete", + "decline_participation_survey_complete", + "meds_assessment_complete", + "driving_record_complete", + "physical_assessment_complete", + "bcva_complete", + "photopic_mars_complete", + "mesopic_mars_complete", + "monofilament_complete", + "moca_complete", + "ecg_complete", + "lab_results_complete", + "device_return_complete", + "specimen_management_complete", + "disposition_complete", + "data_management_complete", ] # Repeat Survey Column Groups repeat_survey_columns: List = [ - "current_medications_complete", + "current_medications_complete", ] repeat_survey_data_columns: List = [ - "current_medications_complete", - "current_medications", + "current_medications_complete", + "current_medications", ] # @@ -121,47 +120,37 @@ # survey_instrument_map: Dict[str, str] = { - "2": "Complete", - "1": "Unverified", - "0": "Incomplete", - "": missing_value_generic, + "2": "Complete", + "1": "Unverified", + "0": "Incomplete", + "": missing_value_generic, } phenotypes_column_map: Dict[str, str] = { - "mhterm_dm2": "Type II Diabetes", - "mhterm_predm": "Prediabetes", - # "mh_a1c": "Elevated A1C", + "mhterm_dm2": "Type II Diabetes", + "mhterm_predm": "Prediabetes", } race_db_map: Dict[str, str] = { - "white": "White", - "black": "Black", - "hispanic": "Hispanic or Latino", - "asian": "Asian", - "unknown": "Unknown", - "": "Value Unavailable", - "Value Unavailable": "Value Unavailable", + "white": "White", + "black": "Black", + "hispanic": "Hispanic or Latino", + "asian": "Asian", + "unknown": "Unknown", + "": "Value Unavailable", + "Value Unavailable": "Value 
Unavailable", } export_group_map: Dict[str, str] = { - "pilot": "Pilot", - "year2": "Year 2", + "pilot": "Pilot", + "year2": "Year 2", } -# sex_column_map: Dict[str, str] = { -# "M": "Male", -# "F": "Female", -# "I": "Intersex", -# "888": "Other", -# "777": "Prefer not to say", -# } - - treatments_column_map: Dict[str, str] = { - "cmtrt_a1c": "Oral Medication", - "cmtrt_glcs": "Non-Insulin Injectable", - "cmtrt_insln": "Insulin Injectable", - "cmtrt_lfst": "Lifestyle Management", + "cmtrt_a1c": "Oral Medication", + "cmtrt_glcs": "Non-Insulin Injectable", + "cmtrt_insln": "Insulin Injectable", + "cmtrt_lfst": "Lifestyle Management", } # @@ -169,10 +158,10 @@ # redcap_report_merge_map: List[Tuple[str, Dict[str, Any]]] = [ - ("participant-list", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), - ("participant-values", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), - ("instrument-status", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), - ("repeat-instrument", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("participant-list", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("participant-values", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("instrument-status", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), + ("repeat-instrument", {"on": index_columns, "how": "inner", "suffixes": (None, '_merged')}), ] # @@ -182,153 +171,153 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. 
redcapLiveTransformConfig: Dict[str, Any] = { - "redcap_data_dir": "storage/release/raw-storage", - "project_metadata": { - "filepath": "AI-READI/REDCap", - "filename": "Redcap_project_metadata.json", + "redcap_data_dir": "storage/release/raw-storage", + "project_metadata": { + "filepath": "AI-READI/REDCap", + "filename": "Redcap_project_metadata.json", + }, + "redcap_api_url": "", + "redcap_api_key": "", + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] + { + "key": "participant-list", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_247884.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [], }, - "redcap_api_url": "", - "redcap_api_key": "", - "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] - { - "key": "participant-list", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_307916.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [], - }, - { - "key": "participant-values", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_307918.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ("remap_values_by_columns", {"columns": data_columns}), - ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), - ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), - ("map_missing_values_by_columns", {"columns": data_columns + phase_2_columns}), - ( - "transform_values_by_column", - { - "column": "pacmpdat", - "new_column_name": "visitweek", - # ISO 8601 string format token for front-end: %V - "transform": 
lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "pacmpdat", - "new_column_name": "visityear", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "pacmpdat", - "new_column_name": "visitdate", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), - "missing_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": phenotypes_column_map, - "new_column_name": "phenotypes", - "all_negative_value": "Control", - "default_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": treatments_column_map, - "new_column_name": "treatments", - "all_negative_value": "No Treatments", - "default_value": missing_value_generic, - }, - ), - ( - "keep_columns", - {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, - ), - ], - }, - { - "key": "instrument-status", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_307920.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ( - "remap_values_by_columns", - {"columns": survey_columns, "value_map": survey_instrument_map}, - ), - ("map_missing_values_by_columns", {"columns": survey_columns}), - ("keep_columns", {"columns": index_columns + survey_columns}), - ], - }, - { - "key": "repeat-instrument", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_307922.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - 
"csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ("drop_rows", {"columns": repeat_survey_columns}), - ( - "aggregate_repeat_instrument_by_index", - {"aggregator": "max", "dtype": str}, - ), - ( - "keep_columns", - {"columns": index_columns + repeat_survey_data_columns}, - ), - ], - }, - ], - "post_transform_merge": ( - index_columns, redcap_report_merge_map - ), - "post_merge_transforms": [ + { + "key": "participant-values", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_242544.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("remap_values_by_columns", {"columns": data_columns}), + ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), + ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), + ("map_missing_values_by_columns", {"columns": data_columns + phase_2_columns}), ( - "remap_values_by_columns", - {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitweek", + # ISO 8601 string format token for front-end: %V + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, + "missing_value": missing_value_generic, + }, ), - ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), - ], - "index_columns": ["record_id"], - "missing_value_generic": missing_value_generic, + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visityear", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, + "missing_value": missing_value_generic, + }, + ), + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitdate", + # ISO 8601 string format token for front-end: %Y + 
"transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), + "missing_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": phenotypes_column_map, + "new_column_name": "phenotypes", + "all_negative_value": "Control", + "default_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": treatments_column_map, + "new_column_name": "treatments", + "all_negative_value": "No Treatments", + "default_value": missing_value_generic, + }, + ), + ( + "keep_columns", + {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, + ), + ], + }, + { + "key": "instrument-status", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_251954.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ( + "remap_values_by_columns", + {"columns": survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": survey_columns}), + ("keep_columns", {"columns": index_columns + survey_columns}), + ], + }, + { + "key": "repeat-instrument", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_259920.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("drop_rows", {"columns": repeat_survey_columns}), + ( + "aggregate_repeat_instrument_by_index", + {"aggregator": "max", "dtype": str}, + ), + ( + "keep_columns", + {"columns": index_columns + repeat_survey_data_columns}, + ), + ], + }, + ], + "post_transform_merge": ( + index_columns, redcap_report_merge_map + ), + "post_merge_transforms": [ + ( + "remap_values_by_columns", + {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", 
{"columns": repeat_survey_data_columns}), + ], + "index_columns": ["record_id"], + "missing_value_generic": missing_value_generic, } # @@ -338,151 +327,151 @@ # Note: The REDCap report_id is matched to the transform # by the value of the key property in the report dictionary. redcapReleaseTransformConfig: Dict[str, Any] = { - "redcap_data_dir": "storage/release/raw-storage", - "project_metadata": { - "filepath": "AI-READI/REDCap", - "filename": "Redcap_project_metadata.json", + "redcap_data_dir": "storage/release/raw-storage", + "project_metadata": { + "filepath": "AI-READI/REDCap", + "filename": "Redcap_project_metadata.json", + }, + "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] + { + "key": "participant-list", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_247884.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [], }, - "reports": [ # Dict[str, Dict[str, str | Dict[str, Any] | List[Tuple[str, Dict[str, Any]]]]] - { - "key": "participant-list", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_307916.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [], - }, - { - "key": "participant-values", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_307918.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ("remap_values_by_columns", {"columns": data_columns}), - ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), - ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), - ("map_missing_values_by_columns", {"columns": data_columns + 
phase_2_columns}), - ( - "transform_values_by_column", - { - "column": "pacmpdat", - "new_column_name": "visitweek", - # ISO 8601 string format token for front-end: %V - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "pacmpdat", - "new_column_name": "visityear", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, - "missing_value": missing_value_generic, - }, - ), - ( - "transform_values_by_column", - { - "column": "pacmpdat", - "new_column_name": "visitdate", - # ISO 8601 string format token for front-end: %Y - "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), - "missing_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": phenotypes_column_map, - "new_column_name": "phenotypes", - "all_negative_value": "Control", - "default_value": missing_value_generic, - }, - ), - ( - "new_column_from_binary_columns_positive_class", - { - "column_name_map": treatments_column_map, - "new_column_name": "treatments", - "all_negative_value": "No Treatments", - "default_value": missing_value_generic, - }, - ), - ( - "keep_columns", - {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, - ), - ], - }, - { - "key": "instrument-status", - "filepath": "AI-READI/REDCap", - "filename": "Redcap_data_report_307920.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ( - "remap_values_by_columns", - {"columns": survey_columns, "value_map": survey_instrument_map}, - ), - ("map_missing_values_by_columns", {"columns": survey_columns}), - ("keep_columns", {"columns": index_columns + survey_columns}), - ], - }, - { - "key": "repeat-instrument", - "filepath": 
"AI-READI/REDCap", - "filename": "Redcap_data_report_307922.csv", - "kwdargs": { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - "report_id": "", - }, - "transforms": [ - ("drop_rows", {"columns": repeat_survey_columns}), - ( - "aggregate_repeat_instrument_by_index", - {"aggregator": "max", "dtype": str}, - ), - ( - "keep_columns", - {"columns": index_columns + repeat_survey_data_columns}, - ), - ], - }, - ], - "post_transform_merge": ( - index_columns, redcap_report_merge_map - ), - "post_merge_transforms": [ + { + "key": "participant-values", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_242544.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("remap_values_by_columns", {"columns": data_columns}), + ("remap_values_by_columns", {"columns": ["export_group"], "value_map": export_group_map}), + ("remap_values_by_columns", {"columns": ["race_db"], "value_map": race_db_map}), + ("map_missing_values_by_columns", {"columns": data_columns + phase_2_columns}), ( - "remap_values_by_columns", - {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitweek", + # ISO 8601 string format token for front-end: %V + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().week, + "missing_value": missing_value_generic, + }, ), - ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), - ], - "index_columns": ["record_id"], - "missing_value_generic": missing_value_generic, + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visityear", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d").isocalendar().year, + "missing_value": 
missing_value_generic, + }, + ), + ( + "transform_values_by_column", + { + "column": "pacmpdat", + "new_column_name": "visitdate", + # ISO 8601 string format token for front-end: %Y + "transform": lambda x: datetime.strptime(x, "%Y-%m-%d"), + "missing_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": phenotypes_column_map, + "new_column_name": "phenotypes", + "all_negative_value": "Control", + "default_value": missing_value_generic, + }, + ), + ( + "new_column_from_binary_columns_positive_class", + { + "column_name_map": treatments_column_map, + "new_column_name": "treatments", + "all_negative_value": "No Treatments", + "default_value": missing_value_generic, + }, + ), + ( + "keep_columns", + {"columns": index_columns + data_columns + computed_columns + phase_2_columns}, + ), + ], + }, + { + "key": "instrument-status", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_251954.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ( + "remap_values_by_columns", + {"columns": survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": survey_columns}), + ("keep_columns", {"columns": index_columns + survey_columns}), + ], + }, + { + "key": "repeat-instrument", + "filepath": "AI-READI/REDCap", + "filename": "Redcap_data_report_259920.csv", + "kwdargs": { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": "", + "report_id": "", + }, + "transforms": [ + ("drop_rows", {"columns": repeat_survey_columns}), + ( + "aggregate_repeat_instrument_by_index", + {"aggregator": "max", "dtype": str}, + ), + ( + "keep_columns", + {"columns": index_columns + repeat_survey_data_columns}, + ), + ], + }, + ], + "post_transform_merge": ( + index_columns, 
redcap_report_merge_map + ), + "post_merge_transforms": [ + ( + "remap_values_by_columns", + {"columns": repeat_survey_columns, "value_map": survey_instrument_map}, + ), + ("map_missing_values_by_columns", {"columns": repeat_survey_data_columns}), + ], + "index_columns": ["record_id"], + "missing_value_generic": missing_value_generic, } @@ -492,4188 +481,4188 @@ # Survey Completions surveyCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "survey-completion-status-by-site", - "strict": True, - "transforms": [ - { - "name": "Demographics Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "demographics_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Health Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "health_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - 
"astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Substance Use Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "substance_use_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "CES-D-10 Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "cesd10_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PAID-5 DM Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "paid5_dm_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": 
"siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Diabetes Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "diabetes_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Dietary Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "dietary_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - 
"field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Opthalmic Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ophthalmic_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX SDOH Combined Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "px_sdoh_combined_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Food Insecurity Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "px_food_insecurity_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - 
"field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Neighborhood Environment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_neighborhood_environment_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_racial_ethnic_discrimination_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Racial and Ethnic Discrimination Survey", - 
"field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Medications Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "meds_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "survey-completion-status-by-site", + "strict": True, + "transforms": [ + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + 
"missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { 
+ "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + 
"value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX SDOH Combined Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": 
missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_neighborhood_environment_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": 
{ + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Medications Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + 
"missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Recruitment Operations recruitmentOperationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "recruitment-operations-status-by-site", - "strict": True, - "transforms": [{ - "name": "Recruitment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "recruitment_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "FAQ Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "faq_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Screening Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "screening_survey_complete"], - "value": 
"record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Preconsent Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "preconsent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Consent Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "consent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Consent Survey", - "field": 
"consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Staff Consent Attestation Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "staff_consent_attestation_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Study Enrollment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "study_enrollment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Driving Record", - "vtype": "DoubleCategorical", - 
"methods": [ - { - "groups": ["siteid", "driving_record_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Device Distribution", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_distribution_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "recruitment-operations-status-by-site", + "strict": True, + "transforms": [{ + "name": "Recruitment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "recruitment_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: 
x["name"], + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "FAQ Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "faq_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Screening Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "screening_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": 
"Preconsent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Consent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Staff Consent Attestation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Staff Consent 
Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, { - "name": "Data Management Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "data_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "name": "Study Enrollment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "study_enrollment_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + 
"missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Driving Record", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "driving_record_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Data Management Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "data_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { 
+ "remap": lambda x: x["name"], + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype Recruitment Counts by Site phenotypeRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-recruitment", - "strict": True, - "transforms": [ - { - "name": "Phenotype Recruitment", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["phenotypes", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-recruitment", + "strict": True, + "transforms": [ + { + "name": "Phenotype Recruitment", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["phenotypes", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", 
+ "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype Recruitment Counts by Site phenotypeRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-recruitment-by-site", - "strict": True, - "transforms": [ - { - "name": "Phenotype Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["siteid", "phenotypes", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-recruitment-by-site", + "strict": True, + "transforms": [ + { + "name": "Phenotype Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["siteid", "phenotypes", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": 
missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype Recruitment Counts by Phase phenotypeRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-recruitment-by-phase", - "strict": True, - "transforms": [ - { - "name": "Phenotype Recruitment by Phase", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["export_group", "phenotypes", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phase", - "field": "export_group", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Phenotype Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "phenotypes", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race Recruitment Counts by Phase raceRecruitmentByPhaseTransformConfig: Tuple[str, 
Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-recruitment-by-phase", - "strict": True, - "transforms": [ - { - "name": "Race Recruitment by Phase", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["export_group", "race_db", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phase", - "field": "export_group", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "race_db", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race Recruitment Counts raceRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-recruitment", - "strict": True, - "transforms": [ - { - "name": "Race Recruitment", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["race_db", 
"visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-recruitment", + "strict": True, + "transforms": [ + { + "name": "Race Recruitment", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["race_db", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race Recruitment Counts by Site raceRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-recruitment-by-site", - "strict": True, - "transforms": [ - { - "name": "Race Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["siteid", "race_db", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Race", - "field": "race_db", - 
"missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-recruitment-by-site", + "strict": True, + "transforms": [ + { + "name": "Race Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["siteid", "race_db", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex Recruitment Counts sexRecruitmentTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-recruitment", - "strict": True, - "transforms": [ - { - "name": "Sex Recruitment", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["scrsex", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": 
missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-recruitment", + "strict": True, + "transforms": [ + { + "name": "Sex Recruitment", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["scrsex", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex Recruitment Counts By Site sexRecruitmentBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-recruitment-by-site", - "strict": True, - "transforms": [ - { - "name": "Sex Recruitment by Site", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["siteid", "scrsex", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-recruitment-by-site", + "strict": True, + "transforms": [ + { + "name": "Sex Recruitment by Site", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["siteid", 
"scrsex", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex Counts by Phase sexRecruitmentByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-recruitment-by-phase", - "strict": True, - "transforms": [ - { - "name": "Sex Recruitment by Phase", - "vtype": "DoubleDiscreteTimeseries", - "methods": [ - { - "groups": ["export_group", "scrsex", "visitdate"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phase", - "field": "export_group", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "x": { - "name": "Week of the Year", - "field": "visitdate", - "missing_value": missing_value_generic, - "astype": str, - }, - "y": { - "name": "Cumulative Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-recruitment-by-phase", + "strict": True, + "transforms": [ + { + "name": "Sex Recruitment by Phase", + "vtype": "DoubleDiscreteTimeseries", + "methods": [ + { + "groups": ["export_group", "scrsex", "visitdate"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + 
"name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "x": { + "name": "Week of the Year", + "field": "visitdate", + "missing_value": missing_value_generic, + "astype": str, + }, + "y": { + "name": "Cumulative Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race & Sex Counts by Race raceSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-sex-by-site", - "strict": True, - "transforms": [ - { - "name": "Race & Sex by Site", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["scrsex", "race_db", "siteid"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-sex-by-site", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "race_db", "siteid"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + 
], + }, ) # Phenotype & Sex Counts by Race phenotypeSexBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-sex-by-site", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Sex by Site", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["scrsex", "phenotypes", "siteid"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-sex-by-site", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Sex by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "phenotypes", "siteid"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Site Counts by Sex phenotypeSiteBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-site-by-sex", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Site by Sex", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["scrsex", 
"phenotypes", "siteid"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-site-by-sex", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Site by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["scrsex", "phenotypes", "siteid"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Race Counts by Sex phenotypeRaceBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-race-by-sex", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Race by Sex", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - 
}, - "subgroup": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-race-by-sex", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Race by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Race Counts by Phase phenotypeRaceByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-race-by-phase", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Race by Sex", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "export_group"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phase", - "field": "export_group", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": 
"phenotype-race-by-phase", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Race by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "export_group"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Phenotype & Sex Counts by Race phenotypeSexByRaceTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "phenotype-sex-by-race", - "strict": True, - "transforms": [ - { - "name": "Phenotype & Sex by Race", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "phenotype-sex-by-race", + "strict": True, + "transforms": [ + { + "name": "Phenotype & Sex by Race", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": 
"Race", + "field": "race_db", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex & Phenotype Counts by Race sexPhenotypeByRaceTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-phenotype-by-race", - "strict": True, - "transforms": [ - { - "name": "Sex & Phenotype by Race", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-phenotype-by-race", + "strict": True, + "transforms": [ + { + "name": "Sex & Phenotype by Race", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, 
+ }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Sex & Race Counts by Phenotype sexRaceByPhenotypeTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "sex-race-by-phenotype", - "strict": True, - "transforms": [ - { - "name": "Sex & Race by Phenotype", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "sex-race-by-phenotype", + "strict": True, + "transforms": [ + { + "name": "Sex & Race by Phenotype", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race & Sex Counts by Phase raceSexByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-sex-by-phase", - "strict": True, 
- "transforms": [ - { - "name": "Race & Sex by Phase", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["export_group", "race_db", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phase", - "field": "export_group", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-sex-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Phase", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["export_group", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race & Sex Counts by Phenotype raceSexByPhenotypeTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-sex-by-phenotype", - "strict": True, - "transforms": [ - { - "name": "Race & Sex by Phenotype", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": 
missing_value_generic, - }, - "group": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-sex-by-phenotype", + "strict": True, + "transforms": [ + { + "name": "Race & Sex by Phenotype", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race & Phenotype Counts by Sex racePhenotypeBySexTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-phenotype-by-sex", - "strict": True, - "transforms": [ - { - "name": "Race & Phenotype by Sex", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": 
"record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-phenotype-by-sex", + "strict": True, + "transforms": [ + { + "name": "Race & Phenotype by Sex", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["phenotypes", "race_db", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Race & Phenotype Counts by Phase racePhenotypeByPhaseTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "race-phenotype-by-phase", - "strict": True, - "transforms": [ - { - "name": "Race & Phenotype by Phase", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["phenotypes", "race_db", "export_group"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Phase", - "field": "export_group", - "missing_value": missing_value_generic, - }, - "group": { - "name": "Race", - "field": "race_db", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Phenotype", - "field": "phenotypes", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "simpleTransform", + { + "key": "race-phenotype-by-phase", + "strict": True, + "transforms": [ + { + "name": "Race & Phenotype by Phase", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": 
["phenotypes", "race_db", "export_group"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Phase", + "field": "export_group", + "missing_value": missing_value_generic, + }, + "group": { + "name": "Race", + "field": "race_db", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Phenotype", + "field": "phenotypes", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) currentMedicationsBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "simpleTransform", - { - "key": "current-medications-by-site", - "strict": True, - "transforms": [ - { - "name": "Current Medications by Site", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "current_medications", "scrsex"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "name": "Current Medication Count", - "field": "current_medications", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Sex", - "field": "scrsex", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Participants (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - } + "simpleTransform", + { + "key": "current-medications-by-site", + "strict": True, + "transforms": [ + { + "name": "Current Medications by Site", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "current_medications", "scrsex"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + 
"name": "Current Medication Count", + "field": "current_medications", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Sex", + "field": "scrsex", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Participants (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + } + ], + }, ) # Overview deviceCollectionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "device-collection-status-by-site", - "strict": True, - "transforms": [ - { - "name": "Device Distribution", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_distribution_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "BCVA", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "bcva_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": 
{ - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Photopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "photopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Mesopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "mesopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Monofilament", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "monofilament_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - 
"remap": lambda x: x["name"], - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "ECG Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ecg_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Lab Results Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "lab_results_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Specimen Management", - "vtype": 
"DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "specimen_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Device Return", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_return_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "device-collection-status-by-site", + "strict": True, + "transforms": [ + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": 
lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "BCVA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Photopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Mesopic MARS", + "vtype": 
"DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "mesopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Monofilament", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "monofilament_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "ECG Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ecg_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "ECG Survey", + 
"field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Lab Results Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "lab_results_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Specimen Management", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "specimen_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Return", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_return_complete"], + "value": "record_id", + "func": "count", + } + ], + 
"accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Overview instrumentCompletionStatusBySiteTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "instrument-completion-status-by-site", - "strict": True, - "transforms": [ - { - "name": "Recruitment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "recruitment_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Recruitment Survey", - "field": "recruitment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "FAQ Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "faq_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "FAQ Survey", - "field": 
"faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "FAQ Survey", - "field": "faq_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Screening Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "screening_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Screening Survey", - "field": "screening_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Preconsent Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "preconsent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Preconsent Survey", - "field": "preconsent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Consent Survey", - "vtype": 
"DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "consent_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Consent Survey", - "field": "consent_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Staff Consent Attestation Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "staff_consent_attestation_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Staff Consent Attestation Survey", - "field": "staff_consent_attestation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Demographics Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "demographics_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: 
x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Health Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "health_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Substance Use Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "substance_use_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - 
"astype": int, - }, - }, - }, - { - "name": "CES-D-10 Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "cesd10_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PAID-5 DM Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "paid5_dm_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Diabetes Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "diabetes_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Diabetes Survey", - 
"field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Dietary Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "dietary_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Opthalmic Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ophthalmic_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX SDOH Combined Survey", - 
"vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "px_sdoh_combined_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Food Insecurity Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "px_food_insecurity_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Neighborhood Environment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_neighborhood_environment_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - 
"astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": [ - "siteid", - "px_racial_ethnic_discrimination_survey_complete", - ], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Decline Participation Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "decline_participation_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Decline Participation Survey", - "field": 
"decline_participation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Decline Participation Survey", - "field": "decline_participation_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Study Enrollment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "study_enrollment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Study Enrollment Survey", - "field": "study_enrollment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Driving Record", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "driving_record_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Driving Record", - "field": "driving_record_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, 
- { - "name": "Device Distribution", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_distribution_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Distribution", - "field": "device_distribution_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Medications Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "meds_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Physical Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "physical_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": 
"Physical Assessment", - "field": "physical_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Physical Assessment", - "field": "physical_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "BCVA", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "bcva_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "BCVA", - "field": "bcva_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Photopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "photopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Photopic MARS", - "field": "photopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Mesopic MARS", - "vtype": "DoubleCategorical", - "methods": [ - { - 
"groups": ["siteid", "mesopic_mars_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Mesopic MARS", - "field": "mesopic_mars_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Monofilament", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "monofilament_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Monofilament", - "field": "monofilament_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "MOCA", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "moca_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "MOCA", - "field": "moca_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "MOCA", - "field": "moca_complete", - "missing_value": 
missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "ECG Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "ecg_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "ECG Survey", - "field": "ecg_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Lab Results Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "lab_results_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Lab Results Survey", - "field": "lab_results_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Specimen Management", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "specimen_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": 
missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Specimen Management", - "field": "specimen_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Device Return", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "device_return_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Device Return", - "field": "device_return_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Disposition Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "disposition_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Disposition Survey", - "field": "disposition_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Disposition Survey", - "field": "disposition_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - 
"missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Data Management Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["siteid", "data_management_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Site", - "field": "siteid", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Data Management Survey", - "field": "data_management_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "instrument-completion-status-by-site", + "strict": True, + "transforms": [ + { + "name": "Recruitment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "recruitment_survey_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Recruitment Survey", + "field": "recruitment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "FAQ Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "faq_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + 
"filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "FAQ Survey", + "field": "faq_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Screening Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "screening_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Screening Survey", + "field": "screening_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Preconsent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "preconsent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Preconsent Survey", + "field": "preconsent_survey_complete", + "missing_value": missing_value_generic, + "astype": 
str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Consent Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "consent_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Consent Survey", + "field": "consent_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Staff Consent Attestation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "staff_consent_attestation_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Staff Consent Attestation Survey", + "field": "staff_consent_attestation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "demographics_survey_complete"], + "value": "record_id", + "func": 
"count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": 
"substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { 
+ "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + 
"astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX SDOH Combined Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "px_food_insecurity_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + 
"px_neighborhood_environment_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": [ + "siteid", + "px_racial_ethnic_discrimination_survey_complete", + ], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Decline Participation Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "decline_participation_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + 
"field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Decline Participation Survey", + "field": "decline_participation_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Study Enrollment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "study_enrollment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Study Enrollment Survey", + "field": "study_enrollment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Driving Record", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "driving_record_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Driving Record", + "field": "driving_record_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Driving Record", + "field": "driving_record_complete", + 
"missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Distribution", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_distribution_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Distribution", + "field": "device_distribution_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Medications Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Physical Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "physical_assessment_complete"], + "value": "record_id", + "func": "count", + } + 
], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Physical Assessment", + "field": "physical_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "BCVA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "bcva_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "BCVA", + "field": "bcva_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Photopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "photopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Photopic MARS", + "field": "photopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count 
(N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Mesopic MARS", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "mesopic_mars_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Mesopic MARS", + "field": "mesopic_mars_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Monofilament", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "monofilament_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Monofilament", + "field": "monofilament_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "MOCA", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "moca_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "MOCA", 
+ "field": "moca_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "MOCA", + "field": "moca_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "ECG Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "ecg_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "ECG Survey", + "field": "ecg_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Lab Results Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "lab_results_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Lab Results Survey", + "field": "lab_results_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Specimen Management", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", 
"specimen_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Specimen Management", + "field": "specimen_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Device Return", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "device_return_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Device Return", + "field": "device_return_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Disposition Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "disposition_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Disposition Survey", + "field": "disposition_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": 
"Disposition Survey", + "field": "disposition_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Data Management Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["siteid", "data_management_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Site", + "field": "siteid", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Data Management Survey", + "field": "data_management_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) # Overview surveyCompletionStatusTransformConfig: Tuple[str, Dict[str, Any]] = ( - "compoundTransform", - { - "key": "instrument-completion-status", - "strict": True, - "transforms": [ - { - "name": "Demographics Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["demographics_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Demographics Survey", - "field": "demographics_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - 
"field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Health Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["health_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Health Survey", - "field": "health_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Substance Use Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["substance_use_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Substance Use Survey", - "field": "substance_use_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "CES-D-10 Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["cesd10_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", 
- "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "CES-D-10 Survey", - "field": "cesd10_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PAID-5 DM Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["paid5_dm_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PAID-5 DM Survey", - "field": "paid5_dm_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Diabetes Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["diabetes_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Diabetes Survey", - "field": "diabetes_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - 
"value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Dietary Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["dietary_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Dietary Survey", - "field": "dietary_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Opthalmic Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["ophthalmic_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Opthalmic Survey", - "field": "ophthalmic_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX SDOH Combined Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_sdoh_combined_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX SDOH 
Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX SDOH Combined Survey", - "field": "px_sdoh_combined_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Food Insecurity Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_food_insecurity_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Food Insecurity Survey", - "field": "px_food_insecurity_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Neighborhood Environment Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_neighborhood_environment_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": 
"PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Neighborhood Environment Survey", - "field": "px_neighborhood_environment_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["px_racial_ethnic_discrimination_survey_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "PhenX Racial and Ethnic Discrimination Survey", - "field": "px_racial_ethnic_discrimination_survey_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, - { - "name": "Medications Assessment", - "vtype": "DoubleCategorical", - "methods": [ - { - "groups": ["meds_assessment_complete"], - "value": "record_id", - "func": "count", - } - ], - "accessors": { - "filterby": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "group": { - "remap": lambda x: x["name"], - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": 
missing_value_generic, - "astype": str, - }, - "subgroup": { - "name": "Medications Assessment", - "field": "meds_assessment_complete", - "missing_value": missing_value_generic, - "astype": str, - }, - "value": { - "name": "Count (N)", - "field": "record_id", - "missing_value": missing_value_generic, - "astype": int, - }, - }, - }, + "compoundTransform", + { + "key": "instrument-completion-status", + "strict": True, + "transforms": [ + { + "name": "Demographics Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["demographics_survey_complete"], + "value": "record_id", + "func": "count", + } ], - }, + "accessors": { + "filterby": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Demographics Survey", + "field": "demographics_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Health Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["health_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Health Survey", + "field": "health_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + 
"missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Substance Use Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["substance_use_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Substance Use Survey", + "field": "substance_use_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "CES-D-10 Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["cesd10_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "CES-D-10 Survey", + "field": "cesd10_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PAID-5 DM Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["paid5_dm_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + 
"missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PAID-5 DM Survey", + "field": "paid5_dm_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Diabetes Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["diabetes_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Diabetes Survey", + "field": "diabetes_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Dietary Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["dietary_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Dietary Survey", + "field": "dietary_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + 
"name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Opthalmic Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["ophthalmic_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Opthalmic Survey", + "field": "ophthalmic_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX SDOH Combined Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_sdoh_combined_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX SDOH Combined Survey", + "field": "px_sdoh_combined_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Food Insecurity Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_food_insecurity_survey_complete"], + "value": "record_id", + "func": 
"count", + } + ], + "accessors": { + "filterby": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Food Insecurity Survey", + "field": "px_food_insecurity_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Neighborhood Environment Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_neighborhood_environment_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Neighborhood Environment Survey", + "field": "px_neighborhood_environment_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["px_racial_ethnic_discrimination_survey_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "PhenX Racial and Ethnic Discrimination Survey", 
+ "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "PhenX Racial and Ethnic Discrimination Survey", + "field": "px_racial_ethnic_discrimination_survey_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + { + "name": "Medications Assessment", + "vtype": "DoubleCategorical", + "methods": [ + { + "groups": ["meds_assessment_complete"], + "value": "record_id", + "func": "count", + } + ], + "accessors": { + "filterby": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "group": { + "remap": lambda x: x["name"], + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "subgroup": { + "name": "Medications Assessment", + "field": "meds_assessment_complete", + "missing_value": missing_value_generic, + "astype": str, + }, + "value": { + "name": "Count (N)", + "field": "record_id", + "missing_value": missing_value_generic, + "astype": int, + }, + }, + }, + ], + }, ) moduleTransformConfigs: Dict[str, Any] = { - "device-collection-status-by-site": deviceCollectionStatusBySiteTransformConfig, - "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, - "survey-completion-status": surveyCompletionStatusTransformConfig, - "survey-completion-status-by-site": surveyCompletionStatusBySiteTransformConfig, - "recruitment-operations-status-by-site": recruitmentOperationsBySiteTransformConfig, - 
"phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, - "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, - "phenotype-race-by-sex": phenotypeRaceBySexTransformConfig, - "phenotype-race-by-phase": phenotypeRaceByPhaseTransformConfig, - "phenotype-sex-by-race": phenotypeSexByRaceTransformConfig, - "race-phenotype-by-sex": racePhenotypeBySexTransformConfig, - "race-phenotype-by-phase": racePhenotypeByPhaseTransformConfig, - "race-sex-by-phase": raceSexByPhaseTransformConfig, - "race-sex-by-phenotype": raceSexByPhenotypeTransformConfig, - "sex-phenotype-by-race": sexPhenotypeByRaceTransformConfig, - "sex-race-by-phenotype": sexRaceByPhenotypeTransformConfig, - "phenotype-recruitment": phenotypeRecruitmentTransformConfig, - "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, - "phenotype-recruitment-by-phase": phenotypeRecruitmentByPhaseTransformConfig, - "race-recruitment": raceRecruitmentTransformConfig, - "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, - "race-recruitment-by-phase": raceRecruitmentByPhaseTransformConfig, - "sex-recruitment": sexRecruitmentTransformConfig, - "sex-recruitment-by-site": sexRecruitmentBySiteTransformConfig, - "sex-recruitment-by-phase": sexRecruitmentByPhaseTransformConfig, - "race-sex-by-site": raceSexBySiteTransformConfig, - "current-medications-by-site": currentMedicationsBySiteTransformConfig, + "device-collection-status-by-site": deviceCollectionStatusBySiteTransformConfig, + "instrument-completion-status-by-site": instrumentCompletionStatusBySiteTransformConfig, + "survey-completion-status": surveyCompletionStatusTransformConfig, + "survey-completion-status-by-site": surveyCompletionStatusBySiteTransformConfig, + "recruitment-operations-status-by-site": recruitmentOperationsBySiteTransformConfig, + "phenotype-sex-by-site": phenotypeSexBySiteTransformConfig, + "phenotype-site-by-sex": phenotypeSiteBySexTransformConfig, + "phenotype-race-by-sex": 
phenotypeRaceBySexTransformConfig, + "phenotype-race-by-phase": phenotypeRaceByPhaseTransformConfig, + "phenotype-sex-by-race": phenotypeSexByRaceTransformConfig, + "race-phenotype-by-sex": racePhenotypeBySexTransformConfig, + "race-phenotype-by-phase": racePhenotypeByPhaseTransformConfig, + "race-sex-by-phase": raceSexByPhaseTransformConfig, + "race-sex-by-phenotype": raceSexByPhenotypeTransformConfig, + "sex-phenotype-by-race": sexPhenotypeByRaceTransformConfig, + "sex-race-by-phenotype": sexRaceByPhenotypeTransformConfig, + "phenotype-recruitment": phenotypeRecruitmentTransformConfig, + "phenotype-recruitment-by-site": phenotypeRecruitmentBySiteTransformConfig, + "phenotype-recruitment-by-phase": phenotypeRecruitmentByPhaseTransformConfig, + "race-recruitment": raceRecruitmentTransformConfig, + "race-recruitment-by-site": raceRecruitmentBySiteTransformConfig, + "race-recruitment-by-phase": raceRecruitmentByPhaseTransformConfig, + "sex-recruitment": sexRecruitmentTransformConfig, + "sex-recruitment-by-site": sexRecruitmentBySiteTransformConfig, + "sex-recruitment-by-phase": sexRecruitmentByPhaseTransformConfig, + "race-sex-by-site": raceSexBySiteTransformConfig, + "current-medications-by-site": currentMedicationsBySiteTransformConfig, } diff --git a/modules/etl/transforms/module_transform.py b/modules/etl/transforms/module_transform.py index 6619fe73..679bc096 100644 --- a/modules/etl/transforms/module_transform.py +++ b/modules/etl/transforms/module_transform.py @@ -1,11 +1,11 @@ # Library Modules from typing import Any, Callable, Union, List, Dict, Tuple from datetime import datetime -import logging, re, copy +import logging, copy, os import modules.etl.vtypes as vtypes # Third-Party Modules -import pandas as pd +import polars as pl class ModuleTransform(object): @@ -14,9 +14,6 @@ def __init__( config: Dict[str, Any], logging_config: Dict[str, str] = {}, ) -> None: - # - # Logging - # # Logging Config Checks self.logging_config = ( @@ -56,22 +53,21 @@ def 
__init__( raise ValueError( f"ModuleTransform argument transforms in config must be a list or dict type" ) - elif len(self.transforms) < 1: + if len(self.transforms) < 1: self.valid = False raise ValueError( f"ModuleTransform instantiation missing transforms in config argument" ) - else: - # Transform attribute is there and has one of the correct types (list, dict) - pass # Normalize Transforms to List Type, Check Validity, and Warn on Missing Attributes + valid = True for indexed_transform in enumerate(self.transforms): - self.valid = self._transformIsValid(indexed_transform) - if self.strict and not self.valid: - raise ValueError( - f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for details" - ) + if self.strict and not self._transformIsValid(indexed_transform): + valid = False + raise ValueError( + f"{self.key}:Missing properties in transforms argument, see log at {self.logging_config['filename']} for details" + ) + self.valid = valid self.logger.info(f"{self.key}:Initialized") @@ -161,40 +157,37 @@ def _setValueType( return pvalue - def simpleTransform(self, df: pd.DataFrame) -> object: + def simpleTransform(self, df: pl.DataFrame) -> object: """ - Performs a pd.DataFrame.groupby transform. The - df is first subset to the relevant fields. A - groupby function is then applied to the subset - to create a multi-index (hierarchy) by the - groups. An aggregate function is then applied - to the non-grouped column (e.g. count, sum). - - One transform for one VType. A single - visualization is then rendered to a single - visualization module. + Performs a group_by transform. The df is first subset to the + relevant fields. A group_by function is then applied to the + subset. 
""" self.transformed = [] transform: Dict[str, Any] = ( self.transforms.pop() - ) # simple transforms have only one transform object + ) # simple transforms have only one transform object name, vtype, methods, accessors = ( transform["name"], getattr(vtypes, transform["vtype"])(), transform["methods"], transform["accessors"], ) + if vtype.isvalid(df, accessors): - temp = df[ - list(set(accessor["field"] for key, accessor in accessors.items())) - ] + + # Select and Group + cols_to_select = list(set(accessor["field"] for key, accessor in accessors.items())) + temp = df.select(cols_to_select) for method in methods: groups, value, func = method["groups"], method["value"], method["func"] - grouped = temp.groupby(groups, as_index=False) - temp = getattr(grouped, func)() - transformed = temp + temp = temp.group_by(groups).agg( + getattr(pl.all().exclude(groups), func)() + ) - for record in transformed.to_dict("records"): + # Row-wise Transformation + transformed = temp + for record in transformed.to_dicts(): record = { key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() @@ -211,20 +204,10 @@ def simpleTransform(self, df: pd.DataFrame) -> object: return self - def compoundTransform(self, df: pd.DataFrame) -> object: + def compoundTransform(self, df: pl.DataFrame) -> object: """ - For each transform, performs a pd.DataFrame.groupby - transform. The df is first subset to the relevant - fields. A groupby function is then applied to the - subset to create a multi-index (hierarchy) by the - groups. An aggregate function is then applied to the - non-grouped column (e.g. count, sum). - - All transforms are combined into a single flat - transform. Transforms must be identical VType, - (e.g. [transformA, transformB, ...]). A single - (aggregated) visualization is then rendered to - a single visualization module. + For each transform, performs a group_by transform. + All transforms are combined into a single flat transform list. 
""" self.transformed = [] @@ -236,20 +219,23 @@ def compoundTransform(self, df: pd.DataFrame) -> object: transform["accessors"], ) if vtype.isvalid(df, accessors): - temp = df[ - list(set(accessor["field"] for key, accessor in accessors.items())) - ] + + # Select and Group + cols_to_select = list(set(accessor["field"] for key, accessor in accessors.items())) + temp = df.select(cols_to_select) for method in methods: groups, value, func = ( method["groups"], method["value"], method["func"], ) - grouped = temp.groupby(groups, as_index=False) - temp = getattr(grouped, func)() - transformed = temp + temp = temp.group_by(groups).agg( + getattr(pl.all().exclude(groups), func)() + ) - for record in transformed.to_dict("records"): + # Row-wise Transformation + transformed = temp + for record in transformed.to_dicts(): record = { key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() @@ -266,20 +252,10 @@ def compoundTransform(self, df: pd.DataFrame) -> object: return self - def mixedTransform(self, df: pd.DataFrame) -> object: + def mixedTransform(self, df: pl.DataFrame) -> object: """ - For each transform, performs a pd.DataFrame.groupby - transform. The df is first subset to the relevant - fields. A groupby function is then applied to the - subset to create a multi-index (hierarchy) by the - groups. An aggregate function is then applied to the - non-grouped column (e.g. count, sum). - - Transforms are kept distinct and inserted into a - dictionary, e.g. {nameA: transformA, nameB: transformB, - ...}. Transforms can be heterogenous VTypes. - Multiple visualizations are then rendered in the same - visualization module. + For each transform, performs a group_by transform. + Transforms are kept distinct and inserted into a dictionary by name. 
""" self.transformed = {} for transform in self.transforms: @@ -290,21 +266,24 @@ def mixedTransform(self, df: pd.DataFrame) -> object: transform["accessors"], ) if vtype.isvalid(df, accessors): - temp = df[ - list(set(accessor["field"] for key, accessor in accessors.items())) - ] + + # Select and Group + cols_to_select = list(set(accessor["field"] for key, accessor in accessors.items())) + temp = df.select(cols_to_select) for method in methods: groups, value, func = ( method["groups"], method["value"], method["func"], ) - grouped = temp.groupby(groups, as_index=False) - temp = getattr(grouped, func)() - transformed = temp + temp = temp.group_by(groups).agg( + getattr(pl.all().exclude(groups), func)() + ) + # Row-wise Transformation + transformed = temp subtransform = [] - for record in transformed.to_dict("records"): + for record in transformed.to_dicts(): record = { key: self._setValueType(vtype, name, record, key, accessors) for key, accessor in accessors.items() diff --git a/modules/etl/transforms/redcap_live_transform.py b/modules/etl/transforms/redcap_live_transform.py index da7c1635..0931f76b 100644 --- a/modules/etl/transforms/redcap_live_transform.py +++ b/modules/etl/transforms/redcap_live_transform.py @@ -1,69 +1,69 @@ # Library Modules -from typing import Any, Callable, Union, List, Dict, Tuple -import re, os, csv, json, logging +from typing import Any, Callable, Union, List, Dict, Tuple, Literal +import re, os, logging, copy # Third Party Modules from redcap import Project -import pandas as pd +import polars as pl import numpy as np - class RedcapLiveTransform(object): - def __init__(self, config: dict) -> None: - - print("REDCap Live Transform") + def __init__(self, config: dict): # # Config # + # + self.config = copy.deepcopy(config) + # Get CWD self.cwd = os.getcwd() # REDCap API Config - self.redcap_api_url = config["redcap_api_url"] - self.redcap_api_key = config["redcap_api_key"] + self.redcap_api_url = self.config["redcap_api_url"] + 
self.redcap_api_key = self.config["redcap_api_key"] # Data Config self.index_columns = ( - config["index_columns"] if "index_columns" in config else ["record_id"] + self.config["index_columns"] if "index_columns" in self.config else ["record_id"] ) # REDCap Reports Config - self.reports_configs = config["reports"] if "reports" in config else [] + self.reports_configs = self.config["reports"] if "reports" in self.config else [] # Report Merging self.post_transform_merge = ( - config["post_transform_merge"] - if "post_transform_merge" in config + self.config["post_transform_merge"] + if "post_transform_merge" in self.config else ([], []) ) # Post Merge Transforms self.post_merge_transforms = ( - config["post_merge_transforms"] if "post_merge_transforms" in config else [] + self.config["post_merge_transforms"] if "post_merge_transforms" in self.config else [] ) # Column Value Separator self.multivalue_separator = ( - config["multivalue_separator"] if "multivalue_separator" in config else "|" + self.config["multivalue_separator"] if "multivalue_separator" in self.config else "|" ) # CSV Float Format (Default: "%.2f") self.csv_float_format = ( - config["csv_float_format"] if "csv_float_format" in config else "%.2f" + self.config["csv_float_format"] if "csv_float_format" in self.config else "%.2f" ) self.missing_value_generic = ( - config["missing_value_generic"] - if "missing_value_generic" in config + self.config["missing_value_generic"] + if "missing_value_generic" in self.config else "Value Unavailable" ) # Logging Config self.logging_config = ( - config["logging_config"] - if "logging_config" in config + self.config["logging_config"] + if "logging_config" in self.config else { "encoding": "utf-8", "filename": "REDCapETL.log", @@ -94,9 +94,10 @@ def __init__(self, config: dict) -> None: } # General Parsing Variables + # Note: Polars handles nulls differently (null vs NaN). + # We map standard "empty" markers to the generic missing value. 
self.none_values = [ np.nan, - pd.NaT, None, "nan", "NaN", @@ -108,17 +109,6 @@ def __init__(self, config: dict) -> None: self.logger.info(f"Initialized") - # - # PyCap Initialization - # - - # Initialize PyCap Objects - self.logger.info(f"Retrieving REDCap project data") - self.project = Project(self.redcap_api_url, self.redcap_api_key) - - # Load REDCap Project Metadata - self.metadata = self.project.export_metadata() - # # Setup Reports & Apply Transforms # @@ -130,84 +120,101 @@ def __init__(self, config: dict) -> None: "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": "", + "csv_delimiter": ",", } - # Get & Structure Report + + self.project: Any = None + self.reports: Dict[str, Any] = {} + + def run(self): + """ + Execute ETL + """ + + # + # PyCap Initialization + # + + # Initialize PyCap Objects + self.logger.info(f"Retrieving REDCap project data") + self.project = Project(self.redcap_api_url, self.redcap_api_key) + + # Load REDCap Project Metadata + self.metadata: Any = self.project.export_metadata() + self.logger.info(f"Retrieving Live REDCap reports") - self.reports = {} for report_config in self.reports_configs: # Get Report report_key = report_config["key"] report_kwdargs = report_config["kwdargs"] | self._default_report_kwdargs report_transforms = report_config["transforms"] - report = self.project.export_report(**report_kwdargs) + + # PyCap returns a list of dicts by default. 
+ report_data: Any = self.project.export_report(**report_kwdargs) + + # Convert to Polars ensuring all columns are input as UTF8 Strings + if not report_data: + df = pl.DataFrame([]) + else: + # Calculate schema to force Utf8 to prevent type inference issues on ragged data + schema = {key: pl.Utf8 for key in set().union(*(d.keys() for d in report_data))} + df = pl.from_dicts(report_data, schema=schema) + # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], - "df": pd.DataFrame(report, dtype = str), + "df": df.rechunk(), "transforms": report_transforms, "transformed": None, - "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), + "annotation": self._get_redcap_type_metadata(df), } - # Apply Pre-Merge Report Transforms - self.logger.info(f"Applying REDCap report transforms") - for report_key, report_object in self.reports.items(): - self._apply_report_transforms(report_key) - - # Merge Reports - self.logger.info(f"Merging REDCap reports") - index_columns, merge_steps = self.post_transform_merge - self.merged = self._merge_reports(index_columns, merge_steps) - - # Apply Post-Merge Transforms - self.logger.info(f"Applying REDCap report post-merge transforms") - for transform, transform_kwdargs in self.post_merge_transforms: - self.merged = self.apply_transform( - self.merged, transform, transform_kwdargs - ) + try: - self.logger.info(f"REDCap transforms complete") + # Apply Pre-Merge Report Transforms + self.logger.info(f"Applying REDCap report transforms") + for report_key, report_object in self.reports.items(): + self._apply_report_transforms(report_key) - return + # Merge Reports + self.logger.info(f"Merging REDCap reports") + index_columns, merge_steps = self.post_transform_merge + self.merged = self._merge_reports(index_columns, merge_steps) + + # Apply Post-Merge Transforms + self.logger.info(f"Applying REDCap report post-merge transforms") + for transform, transform_kwdargs in self.post_merge_transforms: + 
self.merged = self.apply_transform( + self.merged, transform, transform_kwdargs + ) + + self.logger.info(f"REDCap transforms complete") + + except Exception as error: + self.logger.error(error) + self.logger.error("An error occurred during REDCap ETL. See above stacktrace.") + + return self # # Getters # def get_report_id(self, report_key: str) -> str: - """ - Returns a str instance of the REDCap report ID. - """ return self.reports[report_key]["id"] - def get_report_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report. - """ + def get_report_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["df"] - def get_report_transformed_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report - with normalization transforms applied. - """ + def get_report_transformed_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["transformed"] def get_report_transforms( self, report_key: str ) -> List[Tuple[str, Dict[str, Any]]]: - """ - Returns a list of transforms that will be applied to - the report - """ return self.reports[report_key]["transforms"] def get_report_annotations(self, report_key: str) -> List[Dict[str, Any]]: - """ - Returns a list of annotations generated from the - REDCap metadata API call. - """ return self.reports[report_key]["annotations"] # @@ -218,26 +225,41 @@ def _merge_reports( self, index_columns: List[str], merge_steps: List[Tuple[str, Dict[str, Any]]], - ) -> pd.DataFrame: - """ - Performs N - 1 merge transforms on N reports. 
- """ + ) -> pl.DataFrame: receiving_report_key, _ = merge_steps[0] - df_receiving_report = self.reports[receiving_report_key]["transformed"][ + df_receiving_report = self.reports[receiving_report_key]["transformed"].select( index_columns - ] + ) if len(merge_steps) > 0: for merge_step in merge_steps: providing_report_key, merge_kwdargs = merge_step df_providing_report = self.reports[providing_report_key]["transformed"] - df_receiving_report = df_receiving_report.merge( - df_providing_report, **merge_kwdargs + + # Map Pandas merge args to Polars join args + how = merge_kwdargs.get("how", "inner") + + # Handle on/left_on/right_on + on = merge_kwdargs.get("on", None) + left_on = merge_kwdargs.get("left_on", None) + right_on = merge_kwdargs.get("right_on", None) + + if not on and not left_on: + on = index_columns + df_receiving_report, df_providing_report = df_receiving_report.rechunk(), df_providing_report.rechunk() + df_receiving_report = df_receiving_report.join( + df_providing_report, + on=on, + left_on=left_on, + right_on=right_on, + how=how, + suffix=merge_kwdargs.get("suffixes", ("_x", "_y"))[1] + if "suffixes" in merge_kwdargs else "_right" ) else: self.logger.warn( - f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." + f"Unable to Merge – No merge steps provided, returning receiving_report pl.DataFrame." ) return df_receiving_report @@ -246,30 +268,25 @@ def _merge_reports( # Transform Applicator # - # Applies Declared Transforms to Reports def _apply_report_transforms(self, report_key: str) -> None: - """ - Interal method that applies the transforms to each - report as an idempotent transform stack. 
- """ report = self.reports[report_key] annotation = report["annotation"] - report["transformed"] = report["df"] + # Clone to avoid mutating original reference + report["transformed"] = report["df"].clone() for transform in report["transforms"]: transform_name, transform_kwdargs = transform transform_kwdargs = transform_kwdargs | {"annotation": annotation} report["transformed"] = self.apply_transform( report["transformed"], transform_name, transform_kwdargs ) - return def apply_transform( self, - df: pd.DataFrame, + df: pl.DataFrame, transform_name: str, transform_kwdargs: Dict[str, Any] = {}, - ) -> pd.DataFrame: + ) -> pl.DataFrame: return getattr(self, f"_{transform_name}")(df, **transform_kwdargs) # @@ -282,18 +299,16 @@ def apply_transform( def _drop_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df.drop(columns=columns) + if columns: + df = df.drop(columns) return df - def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Drop columns from pd.DataFrame. 
- """ + def drop_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._drop_columns(df=df, columns=columns) # @@ -302,21 +317,15 @@ def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _keep_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - columns = list( - set(df.columns) - - set(self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns)) - ) - df = df.drop(columns=columns) + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns) + df = df.select(columns) return df - def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Keep only selected columns in pd.DataFrame. - """ + def keep_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._keep_columns(df=df, columns=columns) # @@ -325,33 +334,24 @@ def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{name}{separator}{suffix}" - ) + rename_map = {col: f"{col}{separator}{suffix}" for col in columns} + df = df.rename(rename_map) return df def append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a suffix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the suffix is applied every - column. If no suffix is provided, the column names remain - unchanged. 
A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. - """ + ) -> pl.DataFrame: return self._append_column_suffix( df=df, columns=columns, suffix=suffix, separator=separator ) @@ -362,33 +362,24 @@ def append_column_suffix( def _prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{prefix}{separator}{name}" - ) + rename_map = {col: f"{prefix}{separator}{col}" for col in columns} + df = df.rename(rename_map) return df def prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a prefix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the prefix is applied every - column. If no prefix is provided, the column names remain - unchanged. A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. 
- """ + ) -> pl.DataFrame: return self._prepend_column_prefix( df=df, columns=columns, prefix=prefix, separator=separator ) @@ -399,12 +390,12 @@ def prepend_column_prefix( def _remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - # Resolve Mappable Fields and Available Value Maps + ) -> pl.DataFrame: + # Resolve Mappable Fields columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) mappable_fields: List[Dict[str, Any]] @@ -419,53 +410,44 @@ def _remap_values_by_columns( if len(field["options"]) > 0 and field["name"] in columns ] + # Vectorized Re-mapping + expressions = [] + for mappable_field in mappable_fields: - column, value_map = mappable_field["name"], mappable_field["options"] - for i, value in enumerate(df[column]): - subvalues = [ - subvalue.strip() - for subvalue in str(value).split(",") - if len(subvalue) > 0 - ] - remapped_value = self.multivalue_separator.join( - [ - value_map[subvalue] - for subvalue in subvalues - if subvalue in value_map.keys() - ] + column_name = mappable_field["name"] + mapping_options = mappable_field["options"] + + # Ensure keys in mapping are strings for replacement + str_mapping = {str(k): str(v) for k, v in mapping_options.items()} + + # 1. Split string by comma (handling potential multivalue fields) + # 2. Replace values in the list using the mapping (default to original if not found) + # 3. 
Join back with the configured separator + expr = ( + pl.col(column_name) + .cast(pl.Utf8) # Ensure string for splitting + .str.split(",") + .list.eval( + pl.element() + .str.strip_chars() # Strip whitespace from CSV parsing "1, 2" -> "2" + .replace(str_mapping, default=pl.element()) ) - df.loc[i, column] = remapped_value + .list.join(self.multivalue_separator) + .alias(column_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, - ) -> pd.DataFrame: - """ - Remap values by column using a list of annotations. - Each annotation is a dictionary containing a the - following keys: "name", "type", and "options". Key - to this method are then "name" and "options" entries. - The value of the "name" corresponds to the - pd.DataFrame column name. The value of the"options" - entry is a value_map object generated from the - REDCapo metadata API request: - - annotation = { - "name": field["field_name"], - "type": field["field_type"], - "options": field["field_options"] - } - - If multiple values are found in the field, they will - be mapped with a separator. The default separator is - a pipe (i.e. "|"). - - Returns a transformed pd.DataFrame - """ + ) -> pl.DataFrame: return self._remap_values_by_columns( df=df, columns=columns, value_map=value_map ) @@ -476,31 +458,55 @@ def remap_values_by_columns( def _transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - df[new_column_name] = df.loc[df[column] != missing_value, column].apply( - transform + ) -> pl.DataFrame: + # In Polars, using an arbitrary python callable (lambda) via map_elements + # is the equivalent of pandas apply. 
+ + # FIX: The user's transform lambda (e.g., date functions) might return Integers/Floats. + # Polars map_elements with return_dtype=pl.Utf8 strictly enforces string returns. + # We wrap the transform in a helper that forces string conversion before returning to Polars. + + def safe_string_transform(val): + # If the value coming in is our known missing value, return it immediately + if val == str(missing_value): + return str(missing_value) + try: + # Apply user transform + result = transform(val) + # Force cast to string to satisfy pl.Utf8 return type + return str(result) if result is not None else str(missing_value) + except Exception: + # If transformation fails (e.g. date parse error), return missing value + return str(missing_value) + + df = df.with_columns( + pl.when(pl.col(column) != str(missing_value)) + .then( + pl.col(column).map_elements(safe_string_transform, return_dtype=pl.Utf8) + ) + .otherwise(pl.lit(str(missing_value))) + .alias(new_column_name) ) - df[new_column_name] = df[new_column_name].fillna(missing_value) + + # Ensure no actual nulls slip through + df = df.with_columns(pl.col(new_column_name).fill_null(str(missing_value))) + return df def transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + ) -> pl.DataFrame: return self._transform_values_by_column( df=df, column=column, @@ -515,31 +521,44 @@ def transform_values_by_column( def _map_missing_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], missing_value: Any = None, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) missing_value = ( missing_value if missing_value is not None else self.missing_value_generic ) - for column in columns: - for i, value in enumerate(df[column]): - if (len(str(value)) == 0) or (value in self.none_map.keys()): - df.loc[i, column] = missing_value - else: - continue + + # Vectorized update + expressions = [] + none_keys = list(self.none_map.keys()) + + for col_name in columns: + # Check for null, empty string, or "nan"/"NaN" string matches + is_missing = ( + pl.col(col_name).is_null() | + (pl.col(col_name) == "") | + (pl.col(col_name).is_in([str(k) for k in none_keys])) + ) + + expr = ( + pl.when(is_missing) + .then(pl.lit(str(missing_value))) # Ensure literal is string + .otherwise(pl.col(col_name)) + .alias(col_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def map_missing_values_by_columns( - self, df: pd.DataFrame, columns: List[str], missing_value: Any - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + self, df: pl.DataFrame, columns: List[str], missing_value: Any + ) -> pl.DataFrame: return self._map_missing_values_by_columns( df=df, columns=columns, missing_value=missing_value ) @@ -554,67 +573,92 @@ def map_missing_values_by_columns( def _drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], - condition: Callable = lambda column: column == "", + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] + if not columns: + return df + expressions = [condition(col) for col in columns] + mask = pl.any_horizontal(expressions).fill_null(False) + df = df.filter(~mask) + return df def drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], - condition: Callable = lambda column: column == "", - ) -> pd.DataFrame: - """ - Drop rows from pd.DataFrame. - """ - return self._drop_rows(df=df, columns=columns) + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", + ) -> pl.DataFrame: + return self._drop_rows(df=df, columns=columns, condition=condition) # # Transforms - Aggregation # - # ... 
- # # Transforms - Aggregate Repeat Instruments by Index # def _aggregate_repeat_instrument_by_index( self, - df: pd.DataFrame, - aggregator: str = "max", + df: pl.DataFrame, + aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - new_columns = df["redcap_repeat_instrument"].unique() - pivot = pd.pivot_table( - df, - index=self.index_columns, - columns=["redcap_repeat_instrument"], + ) -> pl.DataFrame: + + # Check if repeat instrument exists + if "redcap_repeat_instrument" not in df.columns: + return df + + df = df.filter( + pl.col("redcap_repeat_instrument").is_not_null() & + pl.all_horizontal(pl.col(c).is_not_null() for c in self.index_columns) + ) + + # Create the pivoted dataframe + df = df.rechunk() # Avoid Polars/Rust race condition + pivot_df = df.pivot( values="redcap_repeat_instance", - aggfunc=aggregator, - fill_value=self.missing_value_generic, + index=self.index_columns, + on="redcap_repeat_instrument", + aggregate_function=aggregator ) - df = df.merge(pivot, how="outer", on=self.index_columns) - df = df.drop_duplicates(self.index_columns, keep="first") + + # The pivot might introduce nulls, fill with missing generic + pivot_df = pivot_df.fill_null(self.missing_value_generic) + + # Merge back to original (outer join) + df_unique = df.unique(subset=self.index_columns, keep="first") + df_unique.rechunk() + df = df_unique.join(pivot_df, on=self.index_columns, how="left") + + # Cast new columns (all columns in pivot_df except index) + new_columns = [c for c in pivot_df.columns if c not in self.index_columns] + + # Map python types to Polars types + pl_type = pl.Float64 if dtype is float else pl.Int64 if dtype is int else pl.Utf8 + if dtype is int: pl_type = pl.Int64 + for column in new_columns: - df[column] = df[column].astype(dtype) + df = df.with_columns( + pl.when(pl.col(column) == self.missing_value_generic) + 
.then(None) + .otherwise(pl.col(column)) + .cast(pl_type, strict=False) + .alias(column) + ) + return df def aggregate_repeat_instrument_by_index( - self, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + self, df: pl.DataFrame, aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float + ) -> pl.DataFrame: return self._aggregate_repeat_instrument_by_index( df=df, aggregator=aggregator, dtype=dtype ) @@ -625,87 +669,103 @@ def aggregate_repeat_instrument_by_index( def _new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = "" - for column_name, column_value in column_name_map.items(): - df.loc[ - df[column_name] == "Yes", new_column_name - ] += f"{column_value}{self.multivalue_separator}" - for column_name, column_value in column_name_map.items(): - df.loc[ - (df[column_name] == default_value) & (df[new_column_name] == ""), - new_column_name, - ] = default_value - df.loc[df[new_column_name] == "", new_column_name] = all_negative_value - # Remove delimiter character if column ends with it - rgx = f"\\{self.multivalue_separator}$" - df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex=True) + + # Build a list of expressions: If Col == Yes then "Label|" else "" + concat_exprs = [] + for col_name, label in column_name_map.items(): + 
concat_exprs.append( + pl.when(pl.col(col_name) == "Yes") + .then(pl.lit(f"{label}{self.multivalue_separator}")) + .otherwise(pl.lit("")) + ) + + # Concatenate them all + full_str_col = pl.concat_str(concat_exprs) + + # Check for default value presence + any_default = pl.any_horizontal([ + pl.col(c) == default_value for c in column_name_map.keys() + ]) + + df = df.with_columns( + pl.when((full_str_col == "") & any_default) + .then(pl.lit(default_value)) + .when(full_str_col == "") + .then(pl.lit(all_negative_value)) + .otherwise( + # Remove trailing separator + full_str_col.str.strip_chars_end(self.multivalue_separator) + ) + .alias(new_column_name) + ) return df def new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. 
- """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_positive_class( df=df, column_name_map=column_name_map, new_column_name=new_column_name, + all_negative_value=all_negative_value, default_value=default_value, dtype=dtype, ) def _new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = df[list(column_name_map.keys())].idxmin(axis=1) + + target_cols = list(column_name_map.keys()) + idx_to_col = {i: name for i, name in enumerate(target_cols)} + + df = df.with_columns( + pl.concat_list([ + pl.col(c).cast(pl.Float64, strict=False) for c in target_cols + ]) + .list.arg_min() # Returns index of min value + .replace(idx_to_col) # Map index back to column name + .alias(new_column_name) + ) + return df def new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_negative_class( df=df, column_name_map=column_name_map, @@ -717,26 +777,25 @@ def new_column_from_binary_columns_negative_class( # Utilities # - # Transform Prelude - Get Applicable Transform Columns def _resolve_columns_with_dataframe( - self, df: pd.DataFrame, columns: List[str], default_columns: List[str] + self, df: pl.DataFrame, columns: List[str], default_columns: List[str] ) -> List[str]: """ Internal utility function. Uses set logic to ensure requested columns are available within the target - pd.DataFrame. 
+ pl.DataFrame. """ available_columns, requested_columns = set(df.columns), set(columns) resolved_columns = [] if len(requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – columns parameter has no values. Defaulting to all df.columns" + f"Unexpected Transform – columns parameter has no values. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(available_columns & requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to all df.columns" + f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(requested_columns - available_columns) > 0: @@ -749,15 +808,14 @@ def _resolve_columns_with_dataframe( return resolved_columns - # Extract REDCap Type Metadata - def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: + def _get_redcap_type_metadata(self, df: pl.DataFrame) -> List[Dict[str, Any]]: """ Extracts REDCap field name, type, and options (the - metadata) for each column in the target pd.DataFrame + metadata) for each column in the target pl.DataFrame """ # REDCap Internal Variable Metadata - metadata = [ + metadata: List[Dict[str, Any]] = [ {"name": "redcap_data_access_group", "type": "text", "options": {}}, {"name": "redcap_repeat_instrument", "type": "text", "options": {}}, {"name": "redcap_repeat_instance", "type": "number", "options": {}}, @@ -770,14 +828,16 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: skip_types = {"file", "calc", "descriptive", "notes"} # Get Column Metadata - columns = df.columns.tolist() + columns = df.columns for field in sorted(self.metadata, key=lambda f: f["field_name"]): if field["field_name"] in columns: field_type = field["field_type"] options: dict = {} if field_type in complex_types: rgx = self._field_rgx[field_type] + # Parse choices 
string: "1, Yes | 2, No" for option in field["select_choices_or_calculations"].split("|"): + if "," not in option: continue k, v = ( option.split(",")[0], (",".join(option.split(",")[1:])).strip(), @@ -832,12 +892,12 @@ def export_raw( for report_key, report_object in self.reports.items(): filename = f"{report_key}_raw{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["df"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["df"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 # Approx match to %.2f ) return self @@ -848,12 +908,12 @@ def export_transformed( for report_key, report_object in self.reports.items(): filename = f"{report_key}_transformed{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["transformed"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["transformed"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self @@ -862,16 +922,15 @@ def export_merged_transformed( self, filepath: str = "transformed-merged_redcap-extract.tsv", separator: str = "\t" ) -> object: filepath = os.path.join(self.cwd, filepath) - self.merged.to_csv( + self.merged.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self - if __name__ == "__main__": - pass + pass else: - pass + pass diff --git a/modules/etl/transforms/redcap_release_transform.py b/modules/etl/transforms/redcap_release_transform.py index 9cabd1cb..f18976a9 100644 --- a/modules/etl/transforms/redcap_release_transform.py +++ 
b/modules/etl/transforms/redcap_release_transform.py @@ -1,70 +1,69 @@ # Library Modules -from typing import Any, Callable, Union, List, Dict, Tuple -import re, os, csv, json, logging, datetime +from typing import Any, Callable, Union, List, Dict, Tuple, Literal +import re, os, logging, copy, json, io # Third Party Modules from azure.storage.blob import BlobServiceClient from redcap import Project -import pandas as pd +import polars as pl import numpy as np - class RedcapReleaseTransform(object): - def __init__(self, config: dict) -> None: - - print("REDCap Release Transform") + def __init__(self, config: dict): # # Config # + # + self.config = copy.deepcopy(config) + # Get CWD self.cwd = os.getcwd() - # REDCap Azure Storage Access Config - self.redcap_data_dir = config["redcap_data_dir"] + # REDCap API Config self.redcap_metadata_config = config["project_metadata"] # Data Config self.index_columns = ( - config["index_columns"] if "index_columns" in config else ["record_id"] + self.config["index_columns"] if "index_columns" in self.config else ["record_id"] ) # REDCap Reports Config - self.reports_configs = config["reports"] if "reports" in config else [] + self.reports_configs = self.config["reports"] if "reports" in self.config else [] # Report Merging self.post_transform_merge = ( - config["post_transform_merge"] - if "post_transform_merge" in config + self.config["post_transform_merge"] + if "post_transform_merge" in self.config else ([], []) ) # Post Merge Transforms self.post_merge_transforms = ( - config["post_merge_transforms"] if "post_merge_transforms" in config else [] + self.config["post_merge_transforms"] if "post_merge_transforms" in self.config else [] ) # Column Value Separator self.multivalue_separator = ( - config["multivalue_separator"] if "multivalue_separator" in config else "|" + self.config["multivalue_separator"] if "multivalue_separator" in self.config else "|" ) # CSV Float Format (Default: "%.2f") self.csv_float_format = ( - 
config["csv_float_format"] if "csv_float_format" in config else "%.2f" + self.config["csv_float_format"] if "csv_float_format" in self.config else "%.2f" ) self.missing_value_generic = ( - config["missing_value_generic"] - if "missing_value_generic" in config + self.config["missing_value_generic"] + if "missing_value_generic" in self.config else "Value Unavailable" ) # Logging Config self.logging_config = ( - config["logging_config"] - if "logging_config" in config + self.config["logging_config"] + if "logging_config" in self.config else { "encoding": "utf-8", "filename": "REDCapETL.log", @@ -95,9 +94,10 @@ def __init__(self, config: dict) -> None: } # General Parsing Variables + # Note: Polars handles nulls differently (null vs NaN). + # We map standard "empty" markers to the generic missing value. self.none_values = [ np.nan, - pd.NaT, None, "nan", "NaN", @@ -110,9 +110,24 @@ def __init__(self, config: dict) -> None: self.logger.info(f"Initialized") # - # PyCap Initialization + # Setup Reports & Apply Transforms # + # Internal Defaults + # - Key Assumptions for Transform Functions + # – Only Update if REDCap API and/or PyCap Update + self._default_report_kwdargs = { + "raw_or_label": "raw", + "raw_or_label_headers": "raw", + "export_checkbox_labels": False, + "csv_delimiter": ",", + } + + self.project: Any = None + self.reports: Dict[str, Any] = {} + + def run (self): + # Initialize PyCap Objects self.logger.info(f"Retrieving REDCap project data") @@ -123,22 +138,9 @@ def __init__(self, config: dict) -> None: f"{self.redcap_metadata_config['filepath']}/{self.redcap_metadata_config['filename']}" ) - # - # Setup Reports & Apply Transforms - # - - # Internal Defaults - # - Key Assumptions for Transform Functions - # – Only Update if REDCap API and/or PyCap Update - self._default_report_kwdargs = { - "raw_or_label": "raw", - "raw_or_label_headers": "raw", - "export_checkbox_labels": False, - "csv_delimiter": "", - } # Get & Structure Report 
self.logger.info(f"Retrieving Stored REDCap reports") - self.reports = {} + for report_config in self.reports_configs: # Get Report report_key = report_config["key"] @@ -146,40 +148,47 @@ def __init__(self, config: dict) -> None: report_transforms = report_config["transforms"] # Load Release REDCap Reports - report = report_dataframe = self.get_stored_report( + df = self.get_stored_report( os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_SAS_CONNECTION") or "", os.environ.get("FAIRHUB_BLOB_STORAGE_REDCAP_ETL_CONTAINER") or "", f"{report_config['filepath']}/{report_config['filename']}" ) + # Structure Reports self.reports[report_key] = { "id": report_kwdargs["report_id"], - "df": report_dataframe, + "df": df.rechunk(), "transforms": report_transforms, "transformed": None, - "annotation": self._get_redcap_type_metadata(pd.DataFrame(report)), + "annotation": self._get_redcap_type_metadata(df), } - # Apply Pre-Merge Report Transforms - self.logger.info(f"Applying REDCap report transforms") - for report_key, report_object in self.reports.items(): - self._apply_report_transforms(report_key) - - # Merge Reports - self.logger.info(f"Merging REDCap reports") - index_columns, merge_steps = self.post_transform_merge - self.merged = self._merge_reports(index_columns, merge_steps) - - # Apply Post-Merge Transforms - self.logger.info(f"Applying REDCap report post-merge transforms") - for transform, transform_kwdargs in self.post_merge_transforms: - self.merged = self.apply_transform( - self.merged, transform, transform_kwdargs - ) + try: - self.logger.info(f"REDCap transforms complete") + # Apply Pre-Merge Report Transforms + self.logger.info(f"Applying REDCap report transforms") + for report_key, report_object in self.reports.items(): + self._apply_report_transforms(report_key) - return + # Merge Reports + self.logger.info(f"Merging REDCap reports") + index_columns, merge_steps = self.post_transform_merge + self.merged = self._merge_reports(index_columns, merge_steps) + + # 
Apply Post-Merge Transforms + self.logger.info(f"Applying REDCap report post-merge transforms") + for transform, transform_kwdargs in self.post_merge_transforms: + self.merged = self.apply_transform( + self.merged, transform, transform_kwdargs + ) + + self.logger.info(f"REDCap transforms complete") + + except Exception as error: + self.logger.error(error) + self.logger.error("An error occurred during REDCap ETL. See above stacktrace.") + + return self # # Getters @@ -197,50 +206,64 @@ def get_stored_project_metadata(self, connection_string: str, container_name: st return json.loads(download_stream.readall()) - def get_stored_report(self, connection_string: str, container_name: str, blob_path: str) -> pd.DataFrame: + def get_stored_report(self, connection_string: str, container_name: str, blob_path: str) -> pl.DataFrame: + + self.logger.info(f"Downloading report blob: {blob_path}") + try: + blob_service_client = BlobServiceClient.from_connection_string(connection_string) + container_client = blob_service_client.get_container_client(container_name) + blob_client = container_client.get_blob_client(blob_path) + + # Get Blob as bytes stream + download_stream = blob_client.download_blob() + blob_bytes = download_stream.readall() + + # Use io.BytesIO to wrap the bytes content into a file-like object + # Polars can read directly from this buffer. 
+ report_buffer = io.BytesIO(blob_bytes) + + # Read CSV directly into a Polars DataFrame + if not report_buffer: + df = pl.DataFrame([]) + else: + # Calculate schema to force Utf8 to prevent type inference issues on ragged data + df = pl.read_csv( + report_buffer, + separator=",", + infer_schema_length=0, # Helps with performance/consistency + schema_overrides={'*': pl.Utf8} + ) - # Connect to Azure Blog Storage - blob_service_client = BlobServiceClient.from_connection_string(connection_string) - container_client = blob_service_client.get_container_client(container_name) - blob_client = container_client.get_blob_client(blob_path) - # Get Blob - df = pd.read_csv(blob_client.download_blob(), dtype = str) - return df + self.logger.info(f"Successfully loaded report into Polars DataFrame with shape {df.shape}") + return df + + except Exception as e: + self.logger.error(f"Failed to retrieve or read Azure Blob report at {blob_path}: {e}") + # Return an empty Polars DataFrame on failure + return pl.DataFrame({}) + + + + # + # Getters + # def get_report_id(self, report_key: str) -> str: - """ - Returns a str instance of the REDCap report ID. - """ return self.reports[report_key]["id"] - def get_report_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report. - """ + def get_report_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["df"] - def get_report_transformed_df(self, report_key: str) -> pd.DataFrame: - """ - Returns a pd.DataFrame instance containing the report - with normalization transforms applied. 
- """ + def get_report_transformed_df(self, report_key: str) -> pl.DataFrame: return self.reports[report_key]["transformed"] def get_report_transforms( self, report_key: str ) -> List[Tuple[str, Dict[str, Any]]]: - """ - Returns a list of transforms that will be applied to - the report - """ return self.reports[report_key]["transforms"] def get_report_annotations(self, report_key: str) -> List[Dict[str, Any]]: - """ - Returns a list of annotations generated from the - REDCap metadata API call. - """ return self.reports[report_key]["annotations"] # @@ -251,26 +274,46 @@ def _merge_reports( self, index_columns: List[str], merge_steps: List[Tuple[str, Dict[str, Any]]], - ) -> pd.DataFrame: - """ - Performs N - 1 merge transforms on N reports. - """ + ) -> pl.DataFrame: receiving_report_key, _ = merge_steps[0] - df_receiving_report = self.reports[receiving_report_key]["transformed"][ + df_receiving_report = self.reports[receiving_report_key]["transformed"].select( index_columns - ] + ) if len(merge_steps) > 0: for merge_step in merge_steps: providing_report_key, merge_kwdargs = merge_step df_providing_report = self.reports[providing_report_key]["transformed"] - df_receiving_report = df_receiving_report.merge( - df_providing_report, **merge_kwdargs + + # Ensure Reports Aren't None + if df_receiving_report is None or df_providing_report is None: + self.logger.warn(f"Skipping merge step: {merge_step}") + continue + + # Map Pandas merge args to Polars join args + how = merge_kwdargs.get("how", "inner") + + # Handle on/left_on/right_on + on = merge_kwdargs.get("on", None) + left_on = merge_kwdargs.get("left_on", None) + right_on = merge_kwdargs.get("right_on", None) + + if not on and not left_on: + on = index_columns + df_receiving_report, df_providing_report = df_receiving_report.rechunk(), df_providing_report.rechunk() + df_receiving_report = df_receiving_report.join( + df_providing_report, + on=on, + left_on=left_on, + right_on=right_on, + how=how, + 
suffix=merge_kwdargs.get("suffixes", ("_x", "_y"))[1] + if "suffixes" in merge_kwdargs else "_right" ) else: self.logger.warn( - f"Unable to Merge – No merge steps provided, returning receiving_report pd.DataFrame." + f"Unable to Merge – No merge steps provided, returning receiving_report pl.DataFrame." ) return df_receiving_report @@ -279,31 +322,27 @@ def _merge_reports( # Transform Applicator # - # Applies Declared Transforms to Reports def _apply_report_transforms(self, report_key: str) -> None: - """ - Interal method that applies the transforms to each - report as an idempotent transform stack. - """ report = self.reports[report_key] annotation = report["annotation"] - report["transformed"] = report["df"] + # Clone to avoid mutating original reference + report["transformed"] = report["df"].clone() for transform in report["transforms"]: transform_name, transform_kwdargs = transform transform_kwdargs = transform_kwdargs | {"annotation": annotation} report["transformed"] = self.apply_transform( report["transformed"], transform_name, transform_kwdargs ) - return def apply_transform( self, - df: pd.DataFrame, + df: pl.DataFrame, transform_name: str, transform_kwdargs: Dict[str, Any] = {}, - ) -> pd.DataFrame: - return getattr(self, f"_{transform_name}")(df, **transform_kwdargs) + ) -> pl.DataFrame: + df = getattr(self, f"_{transform_name}")(df, **transform_kwdargs) + return df # # Transforms - Columns @@ -315,18 +354,16 @@ def apply_transform( def _drop_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df.drop(columns=columns) + if columns: + df = df.drop(columns) return df - def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Drop columns from pd.DataFrame. 
- """ + def drop_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._drop_columns(df=df, columns=columns) # @@ -335,21 +372,15 @@ def drop_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _keep_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - columns = list( - set(df.columns) - - set(self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns)) - ) - df = df.drop(columns=columns) + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=df.columns) + df = df.select(columns) return df - def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: - """ - Keep only selected columns in pd.DataFrame. - """ + def keep_columns(self, df: pl.DataFrame, columns: List[str]) -> pl.DataFrame: return self._keep_columns(df=df, columns=columns) # @@ -358,33 +389,24 @@ def keep_columns(self, df: pd.DataFrame, columns: List[str]) -> pd.DataFrame: def _append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{name}{separator}{suffix}" - ) + rename_map = {col: f"{col}{separator}{suffix}" for col in columns} + df = df.rename(rename_map) return df def append_column_suffix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], suffix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a suffix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the suffix is applied every - column. If no suffix is provided, the column names remain - unchanged. 
A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. - """ + ) -> pl.DataFrame: return self._append_column_suffix( df=df, columns=columns, suffix=suffix, separator=separator ) @@ -395,33 +417,24 @@ def append_column_suffix( def _prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df[columns] = df[columns].rename( - mapper=lambda name: f"{prefix}{separator}{name}" - ) + rename_map = {col: f"{prefix}{separator}{col}" for col in columns} + df = df.rename(rename_map) return df def prepend_column_prefix( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], prefix: str = "", separator: str = "", - ) -> pd.DataFrame: - """ - Append a prefix to columns of pd.DataFrame. Note: If no - columns parameter is provided, the prefix is applied every - column. If no prefix is provided, the column names remain - unchanged. A separator argument allows for the expansion - of column names by one or more characters, e.g. "_" for - snakecase. 
- """ + ) -> pl.DataFrame: return self._prepend_column_prefix( df=df, columns=columns, prefix=prefix, separator=separator ) @@ -432,12 +445,12 @@ def prepend_column_prefix( def _remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - # Resolve Mappable Fields and Available Value Maps + ) -> pl.DataFrame: + # Resolve Mappable Fields columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) mappable_fields: List[Dict[str, Any]] @@ -452,53 +465,44 @@ def _remap_values_by_columns( if len(field["options"]) > 0 and field["name"] in columns ] + # Vectorized Re-mapping + expressions = [] + for mappable_field in mappable_fields: - column, value_map = mappable_field["name"], mappable_field["options"] - for i, value in enumerate(df[column]): - subvalues = [ - subvalue.strip() - for subvalue in str(value).split(",") - if len(subvalue) > 0 - ] - remapped_value = self.multivalue_separator.join( - [ - value_map[subvalue] - for subvalue in subvalues - if subvalue in value_map.keys() - ] + column_name = mappable_field["name"] + mapping_options = mappable_field["options"] + + # Ensure keys in mapping are strings for replacement + str_mapping = {str(k): str(v) for k, v in mapping_options.items()} + + # 1. Split string by comma (handling potential multivalue fields) + # 2. Replace values in the list using the mapping (default to original if not found) + # 3. 
Join back with the configured separator + expr = ( + pl.col(column_name) + .cast(pl.Utf8) # Ensure string for splitting + .str.split(",") + .list.eval( + pl.element() + .str.strip_chars() # Strip whitespace from CSV parsing "1, 2" -> "2" + .replace(str_mapping, default=pl.element()) ) - df.loc[i, column] = remapped_value + .list.join(self.multivalue_separator) + .alias(column_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def remap_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], value_map: Dict[str, Any] = {}, - ) -> pd.DataFrame: - """ - Remap values by column using a list of annotations. - Each annotation is a dictionary containing a the - following keys: "name", "type", and "options". Key - to this method are then "name" and "options" entries. - The value of the "name" corresponds to the - pd.DataFrame column name. The value of the"options" - entry is a value_map object generated from the - REDCapo metadata API request: - - annotation = { - "name": field["field_name"], - "type": field["field_type"], - "options": field["field_options"] - } - - If multiple values are found in the field, they will - be mapped with a separator. The default separator is - a pipe (i.e. "|"). - - Returns a transformed pd.DataFrame - """ + ) -> pl.DataFrame: return self._remap_values_by_columns( df=df, columns=columns, value_map=value_map ) @@ -509,31 +513,55 @@ def remap_values_by_columns( def _transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - df[new_column_name] = df.loc[df[column] != missing_value, column].apply( - transform + ) -> pl.DataFrame: + # In Polars, using an arbitrary python callable (lambda) via map_elements + # is the equivalent of pandas apply. 
+ + # FIX: The user's transform lambda (e.g., date functions) might return Integers/Floats. + # Polars map_elements with return_dtype=pl.Utf8 strictly enforces string returns. + # We wrap the transform in a helper that forces string conversion before returning to Polars. + + def safe_string_transform(val): + # If the value coming in is our known missing value, return it immediately + if val == str(missing_value): + return str(missing_value) + try: + # Apply user transform + result = transform(val) + # Force cast to string to satisfy pl.Utf8 return type + return str(result) if result is not None else str(missing_value) + except Exception: + # If transformation fails (e.g. date parse error), return missing value + return str(missing_value) + + df = df.with_columns( + pl.when(pl.col(column) != str(missing_value)) + .then( + pl.col(column).map_elements(safe_string_transform, return_dtype=pl.Utf8) + ) + .otherwise(pl.lit(str(missing_value))) + .alias(new_column_name) ) - df[new_column_name] = df[new_column_name].fillna(missing_value) + + # Ensure no actual nulls slip through + df = df.with_columns(pl.col(new_column_name).fill_null(str(missing_value))) + return df def transform_values_by_column( self, - df: pd.DataFrame, + df: pl.DataFrame, column: str, new_column_name: str, transform: Callable, missing_value: Any, - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + ) -> pl.DataFrame: return self._transform_values_by_column( df=df, column=column, @@ -548,31 +576,44 @@ def transform_values_by_column( def _map_missing_values_by_columns( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], missing_value: Any = None, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) missing_value = ( missing_value if missing_value is not None else self.missing_value_generic ) - for column in columns: - for i, value in enumerate(df[column]): - if (len(str(value)) == 0) or (value in self.none_map.keys()): - df.loc[i, column] = missing_value - else: - continue + + # Vectorized update + expressions = [] + none_keys = list(self.none_map.keys()) + + for col_name in columns: + # Check for null, empty string, or "nan"/"NaN" string matches + is_missing = ( + pl.col(col_name).is_null() | + (pl.col(col_name) == "") | + (pl.col(col_name).is_in([str(k) for k in none_keys])) + ) + + expr = ( + pl.when(is_missing) + .then(pl.lit(str(missing_value))) # Ensure literal is string + .otherwise(pl.col(col_name)) + .alias(col_name) + ) + expressions.append(expr) + + if expressions: + df = df.with_columns(expressions) return df def map_missing_values_by_columns( - self, df: pd.DataFrame, columns: List[str], missing_value: Any - ) -> pd.DataFrame: - """ - Replace 0-length values or values with keys in - self.none_map with self.missing_value_generic. 
- """ + self, df: pl.DataFrame, columns: List[str], missing_value: Any + ) -> pl.DataFrame: return self._map_missing_values_by_columns( df=df, columns=columns, missing_value=missing_value ) @@ -587,67 +628,92 @@ def map_missing_values_by_columns( def _drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str] = [], - condition: Callable = lambda column: column == "", + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: + columns = self._resolve_columns_with_dataframe(df=df, columns=columns, default_columns=[]) - df = df[~df[columns].apply(lambda column: column.apply(condition)).any(axis=1)] + if not columns: + return df + expressions = [condition(col) for col in columns] + mask = pl.any_horizontal(expressions).fill_null(False) + df = df.filter(~mask) + return df def drop_rows( self, - df: pd.DataFrame, + df: pl.DataFrame, columns: List[str], - condition: Callable = lambda column: column == "", - ) -> pd.DataFrame: - """ - Drop rows from pd.DataFrame. - """ - return self._drop_rows(df=df, columns=columns) + condition: Callable[[str], pl.Expr] = lambda col_name: pl.col(col_name) == "", + ) -> pl.DataFrame: + return self._drop_rows(df=df, columns=columns, condition=condition) # # Transforms - Aggregation # - # ... 
- # # Transforms - Aggregate Repeat Instruments by Index # def _aggregate_repeat_instrument_by_index( self, - df: pd.DataFrame, - aggregator: str = "max", + df: pl.DataFrame, + aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: - new_columns = [column for column in df["redcap_repeat_instrument"].unique() if column is not np.nan] - pivot = pd.pivot_table( - df, - index=self.index_columns, - columns=["redcap_repeat_instrument"], + ) -> pl.DataFrame: + + # Check if repeat instrument exists + if "redcap_repeat_instrument" not in df.columns: + return df + + df = df.filter( + pl.col("redcap_repeat_instrument").is_not_null() & + pl.all_horizontal(pl.col(c).is_not_null() for c in self.index_columns) + ) + + # Create the pivoted dataframe + df = df.rechunk() # Avoid Polars/Rust race condition + pivot_df = df.pivot( values="redcap_repeat_instance", - aggfunc=aggregator, - fill_value=self.missing_value_generic, + index=self.index_columns, + on="redcap_repeat_instrument", + aggregate_function=aggregator ) - df = df.merge(pivot, how="outer", on=self.index_columns) - df = df.drop_duplicates(self.index_columns, keep="first") + + # The pivot might introduce nulls, fill with missing generic + pivot_df = pivot_df.fill_null(self.missing_value_generic) + + # Merge back to original (outer join) + df_unique = df.unique(subset=self.index_columns, keep="first") + df_unique.rechunk() + df = df_unique.join(pivot_df, on=self.index_columns, how="left") + + # Cast new columns (all columns in pivot_df except index) + new_columns = [c for c in pivot_df.columns if c not in self.index_columns] + + # Map python types to Polars types + pl_type = pl.Float64 if dtype is float else pl.Int64 if dtype is int else pl.Utf8 + if dtype is int: pl_type = pl.Int64 + for column in new_columns: - df[column] = df[column].astype(dtype) + df = df.with_columns( + 
pl.when(pl.col(column) == self.missing_value_generic) + .then(None) + .otherwise(pl.col(column)) + .cast(pl_type, strict=False) + .alias(column) + ) + return df def aggregate_repeat_instrument_by_index( - self, df: pd.DataFrame, aggregator: str = "max", dtype: Callable = float - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + self, df: pl.DataFrame, aggregator: Literal['min', 'max', 'first', 'last', 'sum', 'mean', 'median', 'len'] | pl.Expr | None = "max", dtype: Callable = float + ) -> pl.DataFrame: return self._aggregate_repeat_instrument_by_index( df=df, aggregator=aggregator, dtype=dtype ) @@ -658,88 +724,103 @@ def aggregate_repeat_instrument_by_index( def _new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, annotation: List[Dict[str, Any]] = [], - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = "" - for column_name, column_value in column_name_map.items(): - df.loc[ - df[column_name] == "Yes", new_column_name - ] += f"{column_value}{self.multivalue_separator}" - for column_name, column_value in column_name_map.items(): - df.loc[ - (df[column_name] == default_value) & (df[new_column_name] == ""), - new_column_name, - ] = default_value - df.loc[df[new_column_name] == "", new_column_name] = all_negative_value - # Remove delimiter character if column ends with it - rgx = f"\\{self.multivalue_separator}$" - df[new_column_name] = df[new_column_name].str.replace(rgx, "", regex=True) - return df + # Build a list of expressions: If Col == Yes then "Label|" else "" + concat_exprs = [] + 
for col_name, label in column_name_map.items(): + concat_exprs.append( + pl.when(pl.col(col_name) == "Yes") + .then(pl.lit(f"{label}{self.multivalue_separator}")) + .otherwise(pl.lit("")) + ) + # Concatenate them all + full_str_col = pl.concat_str(concat_exprs) + + # Check for default value presence + any_default = pl.any_horizontal([ + pl.col(c) == default_value for c in column_name_map.keys() + ]) + + df = df.with_columns( + pl.when((full_str_col == "") & any_default) + .then(pl.lit(default_value)) + .when(full_str_col == "") + .then(pl.lit(all_negative_value)) + .otherwise( + # Remove trailing separator + full_str_col.str.strip_chars_end(self.multivalue_separator) + ) + .alias(new_column_name) + ) + + return df def new_column_from_binary_columns_positive_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", all_negative_value: str = "", default_value: str | None = "Value Unavailable", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. 
- """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_positive_class( df=df, column_name_map=column_name_map, new_column_name=new_column_name, + all_negative_value=all_negative_value, default_value=default_value, dtype=dtype, ) def _new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: + ) -> pl.DataFrame: new_column_name = ( new_column_name if len(new_column_name) > 0 else "_".join(column_name_map.keys()) ) - df[new_column_name] = df[list(column_name_map.keys())].idxmin(axis=1) + + target_cols = list(column_name_map.keys()) + idx_to_col = {i: name for i, name in enumerate(target_cols)} + + df = df.with_columns( + pl.concat_list([ + pl.col(c).cast(pl.Float64, strict=False) for c in target_cols + ]) + .list.arg_min() # Returns index of min value + .replace(idx_to_col) # Map index back to column name + .alias(new_column_name) + ) + return df def new_column_from_binary_columns_negative_class( self, - df: pd.DataFrame, + df: pl.DataFrame, column_name_map: dict, new_column_name: str = "", dtype: Callable = float, - ) -> pd.DataFrame: - """ - Pre-processing REDCap repeat_instrument so each instrument - has its own column and the value. The value is computed - using an aggregation function applied to the repeat_instance - field. - """ + ) -> pl.DataFrame: return self._new_column_from_binary_columns_negative_class( df=df, column_name_map=column_name_map, @@ -751,26 +832,25 @@ def new_column_from_binary_columns_negative_class( # Utilities # - # Transform Prelude - Get Applicable Transform Columns def _resolve_columns_with_dataframe( - self, df: pd.DataFrame, columns: List[str], default_columns: List[str] + self, df: pl.DataFrame, columns: List[str], default_columns: List[str] ) -> List[str]: """ Internal utility function. Uses set logic to ensure requested columns are available within the target - pd.DataFrame. 
+ pl.DataFrame. """ available_columns, requested_columns = set(df.columns), set(columns) resolved_columns = [] if len(requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – columns parameter has no values. Defaulting to all df.columns" + f"Unexpected Transform – columns parameter has no values. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(available_columns & requested_columns) == 0: self.logger.warn( - f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to all df.columns" + f"Unexpected Transform – none of the requested columns were found in df.columns. Defaulting to provided default_columns" ) resolved_columns = default_columns elif len(requested_columns - available_columns) > 0: @@ -783,15 +863,14 @@ def _resolve_columns_with_dataframe( return resolved_columns - # Extract REDCap Type Metadata - def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: + def _get_redcap_type_metadata(self, df: pl.DataFrame) -> List[Dict[str, Any]]: """ Extracts REDCap field name, type, and options (the - metadata) for each column in the target pd.DataFrame + metadata) for each column in the target pl.DataFrame """ # REDCap Internal Variable Metadata - metadata = [ + metadata: List[Dict[str, Any]] = [ {"name": "redcap_data_access_group", "type": "text", "options": {}}, {"name": "redcap_repeat_instrument", "type": "text", "options": {}}, {"name": "redcap_repeat_instance", "type": "number", "options": {}}, @@ -804,14 +883,16 @@ def _get_redcap_type_metadata(self, df: pd.DataFrame) -> List[Dict[str, Any]]: skip_types = {"file", "calc", "descriptive", "notes"} # Get Column Metadata - columns = df.columns.tolist() + columns = df.columns for field in sorted(self.metadata, key=lambda f: f["field_name"]): if field["field_name"] in columns: field_type = field["field_type"] options: dict = {} if field_type in complex_types: rgx = self._field_rgx[field_type] + # Parse choices 
string: "1, Yes | 2, No" for option in field["select_choices_or_calculations"].split("|"): + if "," not in option: continue k, v = ( option.split(",")[0], (",".join(option.split(",")[1:])).strip(), @@ -866,12 +947,12 @@ def export_raw( for report_key, report_object in self.reports.items(): filename = f"{report_key}_raw{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["df"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["df"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 # Approx match to %.2f ) return self @@ -882,12 +963,12 @@ def export_transformed( for report_key, report_object in self.reports.items(): filename = f"{report_key}_transformed{filetype}" filepath = os.path.join(self.cwd, path, filename) - transformed = report_object["transformed"] - transformed.to_csv( + transformed: pl.DataFrame = report_object["transformed"] + transformed.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self @@ -896,15 +977,15 @@ def export_merged_transformed( self, filepath: str = "transformed-merged_redcap-extract.tsv", separator: str = "\t" ) -> object: filepath = os.path.join(self.cwd, filepath) - self.merged.to_csv( + self.merged.write_csv( filepath, - sep=separator, - quoting=csv.QUOTE_NONNUMERIC, - float_format=self.csv_float_format, + separator=separator, + quote_style="non_numeric", + float_precision=2 ) return self if __name__ == "__main__": - pass + pass else: - pass + pass diff --git a/modules/etl/vtypes/compound.py b/modules/etl/vtypes/compound.py index 82ce4366..5104b638 100644 --- a/modules/etl/vtypes/compound.py +++ b/modules/etl/vtypes/compound.py @@ -8,8 +8,6 @@ DoubleContinuousTimeseries, ) from typing import Tuple, 
List, Dict, Callable, Any -import pandas as pd - class Compound(ComplexVType): def __init__(self) -> None: diff --git a/modules/etl/vtypes/mixed.py b/modules/etl/vtypes/mixed.py index fe24f477..f5220b60 100644 --- a/modules/etl/vtypes/mixed.py +++ b/modules/etl/vtypes/mixed.py @@ -9,8 +9,6 @@ DoubleContinuousTimeseries, ) from .compound import Compound -import pandas as pd - class Mixed(ComplexVType): def __init__(self) -> None: diff --git a/modules/etl/vtypes/timeseries.py b/modules/etl/vtypes/timeseries.py index e7dba483..1d54e778 100644 --- a/modules/etl/vtypes/timeseries.py +++ b/modules/etl/vtypes/timeseries.py @@ -1,5 +1,4 @@ from .vtype import SimpleVType -import pandas as pd from datetime import datetime @@ -12,7 +11,7 @@ def __init__(self) -> None: ("group", str), ("x", datetime), ], - pd._libs.tslibs.nattype.NaTType, + str, ) @@ -26,7 +25,7 @@ def __init__(self) -> None: ("x", str), ("y", int), ], - pd._libs.tslibs.nattype.NaTType, + str, ) @@ -40,7 +39,7 @@ def __init__(self) -> None: ("x", str), ("y", float), ], - pd._libs.tslibs.nattype.NaTType, + str, ) diff --git a/modules/etl/vtypes/vtype.py b/modules/etl/vtypes/vtype.py index 7e1bb6b8..81ca6992 100644 --- a/modules/etl/vtypes/vtype.py +++ b/modules/etl/vtypes/vtype.py @@ -1,81 +1,88 @@ -from typing import Any, Callable, Union, List, Dict, Tuple -from datetime import datetime -import pandas as pd +from typing import Any, Callable, List, Dict, Tuple, Type, Union +import polars as pl +# A property on a SimpleVType: ("value", int), ("filterby", str), etc. 
+VTypeProp = Tuple[str, Callable[..., Any]] -class SimpleVType(object): +# A child vtype class included inside ComplexVType +VTypeClass = Type["BaseVType"] + +# ComplexVType may accept either real props (VTypeProp) +# or child vtype classes (VTypeClass) +PropsList = List[Union[VTypeProp, VTypeClass]] + + +class BaseVType: def __init__( self, name: str, - props: List[Tuple[str, Callable]], - missing_value: Callable, + props: PropsList, + missing_value: Callable[..., Any], ) -> None: self.name = name self.props = props self.missing_value = missing_value - # References self.validation_errors: List[str] = [] - def __str__(self): + def __str__(self) -> str: return f"{self.__dict__}" - def isvalid(self, df: pd.DataFrame, accessors: Dict[str, Dict[str, str]]) -> bool: - columns = df.columns - for pname, ptype in self.props: - if pname in accessors.keys(): - column = accessors[pname]["field"] - if column not in columns: - self.validation_errors.append( - f"VType {self.name.title()} pd.DataFrame argument (df) is missing column defined in accessors argument, {column}" - ) - return False - else: - continue - else: + def _validate_single_accessor( + self, + df_cols: set, + accessors: Dict[str, Dict[str, str]] + ) -> bool: + ok = True + vname = self.name.title() + + for item in self.props: + # ComplexVType entries may be classes, skip them + if not isinstance(item, tuple): + continue + + pname, _ = item + + field_info = accessors.get(pname) + if not field_info: + self.validation_errors.append( + f"VType {vname} accessors argument is missing required property, {pname}" + ) + ok = False + continue + + column = field_info["field"] + if column not in df_cols: self.validation_errors.append( - f"VType {self.name.title()} accessors argument is missing required property, {pname}" + f"VType {vname} pl.DataFrame argument (df) is missing column " + f"defined in accessors argument, {column}" ) - return False - return True + ok = False + return ok -class ComplexVType(object): - def 
__init__( + +class SimpleVType(BaseVType): + def isvalid( self, - name: str, - props: List[Any], - missing_value: Callable, - ) -> None: - self.name = name - self.props = props - self.missing_value = missing_value - # References - self.validation_errors: List[str] = [] + df: pl.DataFrame, + accessors: Dict[str, Dict[str, str]] + ) -> bool: + return self._validate_single_accessor(set(df.columns), accessors) - def __str__(self): - return f"{self.__dict__}" +class ComplexVType(BaseVType): def isvalid( - self, df: pd.DataFrame, accessorsList: List[Dict[str, Dict[str, str]]] + self, + df: pl.DataFrame, + accessors_list: List[Dict[str, Dict[str, str]]] ) -> bool: + df_cols = set(df.columns) valid = True - columns = df.columns - for accessors in accessorsList: - for pname, ptype in self.props: - if pname in accessors.keys(): - column = accessors[pname]["field"] - if column not in columns: - self.validation_errors.append( - f"VType {self.name.title()} pd.DataFrame argument (df) is missing column defined in accessors argument, {column}" - ) - valid = False - else: - continue - else: - self.validation_errors.append( - f"VType {self.name.title()} accessors argument is missing required property, {pname}" - ) - valid = False + + for accessors in accessors_list: + if not self._validate_single_accessor(df_cols, accessors): + valid = False + return valid diff --git a/poetry.lock b/poetry.lock index e4dc40bf..7844474f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,35 +1,207 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
+ +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.13.2" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.13.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2372b15a5f62ed37789a6b383ff7344fc5b9f243999b0cd9b629d8bc5f5b4155"}, + {file = "aiohttp-3.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7f8659a48995edee7229522984bd1009c1213929c769c2daa80b40fe49a180c"}, + {file = "aiohttp-3.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:939ced4a7add92296b0ad38892ce62b98c619288a081170695c6babe4f50e636"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6315fb6977f1d0dd41a107c527fee2ed5ab0550b7d885bc15fee20ccb17891da"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6e7352512f763f760baaed2637055c49134fd1d35b37c2dedfac35bfe5cf8725"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e09a0a06348a2dd73e7213353c90d709502d9786219f69b731f6caa0efeb46f5"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a09a6d073fb5789456545bdee2474d14395792faa0527887f2f4ec1a486a59d3"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:b59d13c443f8e049d9e94099c7e412e34610f1f49be0f230ec656a10692a5802"}, + {file = "aiohttp-3.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:20db2d67985d71ca033443a1ba2001c4b5693fe09b0e29f6d9358a99d4d62a8a"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:960c2fc686ba27b535f9fd2b52d87ecd7e4fd1cf877f6a5cba8afb5b4a8bd204"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6c00dbcf5f0d88796151e264a8eab23de2997c9303dd7c0bf622e23b24d3ce22"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fed38a5edb7945f4d1bcabe2fcd05db4f6ec7e0e82560088b754f7e08d93772d"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:b395bbca716c38bef3c764f187860e88c724b342c26275bc03e906142fc5964f"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:204ffff2426c25dfda401ba08da85f9c59525cdc42bda26660463dd1cbcfec6f"}, + {file = "aiohttp-3.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:05c4dd3c48fb5f15db31f57eb35374cb0c09afdde532e7fb70a75aede0ed30f6"}, + {file = "aiohttp-3.13.2-cp310-cp310-win32.whl", hash = "sha256:e574a7d61cf10351d734bcddabbe15ede0eaa8a02070d85446875dc11189a251"}, + {file = "aiohttp-3.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:364f55663085d658b8462a1c3f17b2b84a5c2e1ba858e1b79bff7b2e24ad1514"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb"}, + {file = "aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592"}, + {file = "aiohttp-3.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782"}, + {file = "aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8"}, + {file = "aiohttp-3.13.2-cp311-cp311-win32.whl", hash = 
"sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec"}, + {file = "aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc"}, + {file = "aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e"}, + {file = "aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d"}, + {file = 
"aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169"}, + {file = "aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248"}, + {file = "aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e"}, + {file = "aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742"}, + {file = "aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e"}, + {file = "aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476"}, + {file = "aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23"}, + {file = "aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254"}, + {file = "aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b"}, + {file = 
"aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61"}, + {file = "aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011"}, + {file = "aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = 
"sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4"}, + {file = "aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a"}, + {file = "aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940"}, + {file = "aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd"}, + {file = "aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e"}, + {file = "aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be"}, + {file = "aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c"}, + {file = "aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734"}, + {file = "aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7fbdf5ad6084f1940ce88933de34b62358d0f4a0b6ec097362dcd3e5a65a4989"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7c3a50345635a02db61792c85bb86daffac05330f6473d524f1a4e3ef9d0046d"}, + {file = "aiohttp-3.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e87dff73f46e969af38ab3f7cb75316a7c944e2e574ff7c933bc01b10def7f5"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:2adebd4577724dcae085665f294cc57c8701ddd4d26140504db622b8d566d7aa"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e036a3a645fe92309ec34b918394bb377950cbb43039a97edae6c08db64b23e2"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:23ad365e30108c422d0b4428cf271156dd56790f6dd50d770b8e360e6c5ab2e6"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1f9b2c2d4b9d958b1f9ae0c984ec1dd6b6689e15c75045be8ccb4011426268ca"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a92cf4b9bea33e15ecbaa5c59921be0f23222608143d025c989924f7e3e0c07"}, + {file = "aiohttp-3.13.2-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:070599407f4954021509193404c4ac53153525a19531051661440644728ba9a7"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:29562998ec66f988d49fb83c9b01694fa927186b781463f376c5845c121e4e0b"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4dd3db9d0f4ebca1d887d76f7cdbcd1116ac0d05a9221b9dad82c64a62578c4d"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d7bc4b7f9c4921eba72677cd9fedd2308f4a4ca3e12fab58935295ad9ea98700"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:dacd50501cd017f8cccb328da0c90823511d70d24a323196826d923aad865901"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8b2f1414f6a1e0683f212ec80e813f4abef94c739fd090b66c9adf9d2a05feac"}, + {file = "aiohttp-3.13.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04c3971421576ed24c191f610052bcb2f059e395bc2489dd99e397f9bc466329"}, + {file = "aiohttp-3.13.2-cp39-cp39-win32.whl", hash = 
"sha256:9f377d0a924e5cc94dc620bc6366fc3e889586a7f18b748901cf016c916e2084"}, + {file = "aiohttp-3.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:9c705601e16c03466cb72011bd1af55d68fa65b045356d8f96c216e5f6db0fa5"}, + {file = "aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.5.0" +aiosignal = ">=1.4.0" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "backports.zstd ; platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.4.0" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, + {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" +typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""} [[package]] name = "alembic" -version = "1.14.1" +version = "1.17.2" description = "A database migration tool for SQLAlchemy." 
-category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "alembic-1.14.1-py3-none-any.whl", hash = "sha256:1acdd7a3a478e208b0503cd73614d5e4c6efafa4e73518bb60e4f2846a37b1c5"}, - {file = "alembic-1.14.1.tar.gz", hash = "sha256:496e888245a53adf1498fcab31713a469c65836f8de76e01399aa1c3e90dd213"}, + {file = "alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6"}, + {file = "alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e"}, ] [package.dependencies] Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" +SQLAlchemy = ">=1.4.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.12" [package.extras] -tz = ["backports.zoneinfo", "tzdata"] +tz = ["tzdata"] [[package]] name = "aniso8601" -version = "10.0.0" +version = "10.0.1" description = "A library for parsing ISO 8601 strings." 
-category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "aniso8601-10.0.0-py2.py3-none-any.whl", hash = "sha256:3c943422efaa0229ebd2b0d7d223effb5e7c89e24d2267ebe76c61a2d8e290cb"}, - {file = "aniso8601-10.0.0.tar.gz", hash = "sha256:ff1d0fc2346688c62c0151547136ac30e322896ed8af316ef7602c47da9426cf"}, + {file = "aniso8601-10.0.1-py2.py3-none-any.whl", hash = "sha256:eb19717fd4e0db6de1aab06f12450ab92144246b257423fe020af5748c0cb89e"}, + {file = "aniso8601-10.0.1.tar.gz", hash = "sha256:25488f8663dd1528ae1f54f94ac1ea51ae25b4d531539b8bc707fed184d16845"}, ] [package.extras] @@ -37,14 +209,14 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] [[package]] name = "anyio" -version = "4.8.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "dev" +version = "4.11.0" +description = "High-level concurrency and networking framework on top of asyncio or Trio" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, - {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, + {file = "anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc"}, + {file = "anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4"}, ] [package.dependencies] @@ -54,17 +226,16 @@ sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] -trio = 
["trio (>=0.26.1)"] +trio = ["trio (>=0.31.0)"] [[package]] name = "appnope" version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -72,32 +243,27 @@ files = [ [[package]] name = "argon2-cffi" -version = "23.1.0" +version = "25.1.0" description = "Argon2 for Python" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, + {file = "argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741"}, + {file = "argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1"}, ] [package.dependencies] argon2-cffi-bindings = "*" -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - [[package]] name = "argon2-cffi-bindings" version = "21.2.0" description = "Low-level CFFI bindings for Argon2" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "python_version >= \"3.14\"" files = [ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, @@ -129,36 +295,76 @@ cffi = ">=1.0.1" dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] +[[package]] +name = "argon2-cffi-bindings" +version = "25.1.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version < \"3.14\"" +files = [ + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:3d3f05610594151994ca9ccb3c771115bdb4daef161976a266f0dd8aa9996b8f"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8b8efee945193e667a396cbc7b4fb7d357297d6234d30a489905d96caabde56b"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3c6702abc36bf3ccba3f802b799505def420a1b7039862014a65db3205967f5a"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1c70058c6ab1e352304ac7e3b52554daadacd8d453c1752e547c76e9c99ac44"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2fd3bfbff3c5d74fef31a722f729bf93500910db650c925c2d6ef879a7e51cb"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4f9665de60b1b0e99bcd6be4f17d90339698ce954cfd8d9cf4f91c995165a92"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ba92837e4a9aa6a508c8d2d7883ed5a8f6c308c89a4790e1e447a220deb79a85"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win32.whl", hash = "sha256:84a461d4d84ae1295871329b346a97f68eade8c53b6ed9a7ca2d7467f3c8ff6f"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b55aec3565b65f56455eebc9b9f34130440404f27fe21c3b375bf1ea4d8fbae6"}, + {file = "argon2_cffi_bindings-25.1.0-cp314-cp314t-win_arm64.whl", hash = 
"sha256:87c33a52407e4c41f3b70a9c2d3f6056d88b10dad7695be708c5021673f55623"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98"}, + {file = "argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6dca33a9859abf613e22733131fc9194091c1fa7cb3e131c143056b4856aa47e"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:21378b40e1b8d1655dd5310c84a40fc19a9aa5e6366e835ceb8576bf0fea716d"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d588dec224e2a83edbdc785a5e6f3c6cd736f46bfd4b441bbb5aa1f5085e584"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5acb4e41090d53f17ca1110c3427f0a130f944b896fc8c83973219c97f57b690"}, + {file = "argon2_cffi_bindings-25.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:da0c79c23a63723aa5d782250fbf51b768abca630285262fb5144ba5ae01e520"}, + {file = "argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d"}, +] + +[package.dependencies] +cffi = {version = ">=1.0.1", markers = "python_version < \"3.14\""} + [[package]] name = "arrow" -version = "1.3.0" +version = "1.4.0" description = "Better dates & times for Python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, - {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, + {file = "arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205"}, + {file = "arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7"}, ] [package.dependencies] python-dateutil = ">=2.7.0" -types-python-dateutil = ">=2.8.10" +tzdata = {version = "*", markers = "python_version >= \"3.9\""} [package.extras] doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] -test = ["dateparser (>=1.0.0,<2.0.0)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (>=3.0.0,<4.0.0)"] +test = ["dateparser (==1.*)", 
"pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2025.2)", "simplejson (==3.*)"] [[package]] name = "art" -version = "6.4" +version = "6.5" description = "ASCII Art Library For Python" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "art-6.4-py3-none-any.whl", hash = "sha256:4e58b6f0a0bb8574efb311eff24bdd28bf889c0c526ccbbb5410c644340a301c"}, - {file = "art-6.4.tar.gz", hash = "sha256:417fea674bff8cea7ed058291ad1b81a6032dfce5152f28e629fa4a798a2c14c"}, + {file = "art-6.5-py3-none-any.whl", hash = "sha256:70706408144c45c666caab690627d5c74aea7b6c7ce8cc968408ddeef8d84afd"}, + {file = "art-6.5.tar.gz", hash = "sha256:a98d77b42c278697ec6cf4b5bdcdfd997f6b2425332da078d4e31e31377d1844"}, ] [package.extras] @@ -168,9 +374,9 @@ dev = ["bandit (>=1.5.1)", "coverage (>=4.1)", "pydocstyle (>=3.0.0)", "vulture name = "astroid" version = "2.15.8" description = "An abstract syntax tree for Python with inference support." -category = "dev" optional = false python-versions = ">=3.7.2" +groups = ["dev"] files = [ {file = "astroid-2.15.8-py3-none-any.whl", hash = "sha256:1aa149fc5c6589e3d0ece885b4491acd80af4f087baafa3fb5203b113e68cd3c"}, {file = "astroid-2.15.8.tar.gz", hash = "sha256:6c107453dffee9055899705de3c9ead36e74119cee151e5a9aaf7f0b0e020a6a"}, @@ -188,9 +394,9 @@ wrapt = [ name = "asttokens" version = "3.0.0" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, @@ -202,26 +408,27 @@ test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "async-lru" -version = "2.0.4" +version = "2.0.5" description = "Simple LRU cache for asyncio" -category = "dev" 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, + {file = "async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943"}, + {file = "async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb"}, ] [package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "async-timeout" version = "5.0.1" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_full_version < \"3.11.3\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -229,98 +436,63 @@ files = [ [[package]] name = "attrs" -version = "25.1.0" +version = "25.4.0" description = "Classes Without Boilerplate" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, - {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy 
(>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "azure-common" -version = "1.1.28" -description = "Microsoft Azure Client Library for Python (Common)" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "azure-common-1.1.28.zip", hash = "sha256:4ac0cd3214e36b6a1b6a442686722a5d8cc449603aa833f3f0f40bda836704a3"}, - {file = "azure_common-1.1.28-py2.py3-none-any.whl", hash = "sha256:5c12d3dcf4ec20599ca6b0d3e09e86e146353d443e7fcc050c9a19c1f9df20ad"}, + {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, ] [[package]] name = "azure-communication-email" -version = "1.0.0" +version = "1.1.0" description = "Microsoft Azure MyService Management Client Library for Python" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "azure-communication-email-1.0.0.zip", hash = "sha256:5df96b8b4389696244982ffc3740722f1948abb289f19af00ce2e1c534450095"}, - {file = "azure_communication_email-1.0.0-py3-none-any.whl", hash = "sha256:b580ccfc9f1372d0b65f235334e569f3909894316bc3203bd9deb5760612693a"}, + {file = "azure_communication_email-1.1.0-py3-none-any.whl", hash = 
"sha256:9212153f21cf7e68734c32ebfe8702b43398bd01df2dddb0ca52cd5a8bbd5024"}, + {file = "azure_communication_email-1.1.0.tar.gz", hash = "sha256:6a4af8281024327c3ab18a4996919069a99a69aad3a19c40f7852a6682493327"}, ] [package.dependencies] -azure-common = ">=1.1,<2.0" -azure-mgmt-core = ">=1.3.2,<2.0.0" -msrest = ">=0.7.1" +azure-core = ">=1.30.0" +isodate = ">=0.6.1" +typing-extensions = ">=4.6.0" [[package]] name = "azure-core" -version = "1.32.0" +version = "1.36.0" description = "Microsoft Azure Core Library for Python" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, - {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, + {file = "azure_core-1.36.0-py3-none-any.whl", hash = "sha256:fee9923a3a753e94a259563429f3644aaf05c486d45b1215d098115102d91d3b"}, + {file = "azure_core-1.36.0.tar.gz", hash = "sha256:22e5605e6d0bf1d229726af56d9e92bc37b6e726b141a18be0b4d424131741b7"}, ] [package.dependencies] requests = ">=2.21.0" -six = ">=1.11.0" typing-extensions = ">=4.6.0" [package.extras] aio = ["aiohttp (>=3.0)"] - -[[package]] -name = "azure-mgmt-core" -version = "1.5.0" -description = "Microsoft Azure Management Core Library for Python" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "azure_mgmt_core-1.5.0-py3-none-any.whl", hash = "sha256:18aaa5a723ee8ae05bf1bfc9f6d0ffb996631c7ea3c922cc86f522973ce07b5f"}, - {file = "azure_mgmt_core-1.5.0.tar.gz", hash = "sha256:380ae3dfa3639f4a5c246a7db7ed2d08374e88230fd0da3eb899f7c11e5c441a"}, -] - -[package.dependencies] -azure-core = ">=1.31.0" +tracing = ["opentelemetry-api (>=1.26,<2.0)"] [[package]] name = "azure-storage-blob" -version = "12.25.1" +version = "12.27.1" description = "Microsoft Azure Blob Storage Client Library for 
Python" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "azure_storage_blob-12.25.1-py3-none-any.whl", hash = "sha256:1f337aab12e918ec3f1b638baada97550673911c4ceed892acc8e4e891b74167"}, - {file = "azure_storage_blob-12.25.1.tar.gz", hash = "sha256:4f294ddc9bc47909ac66b8934bd26b50d2000278b10ad82cc109764fdc6e0e3b"}, + {file = "azure_storage_blob-12.27.1-py3-none-any.whl", hash = "sha256:65d1e25a4628b7b6acd20ff7902d8da5b4fde8e46e19c8f6d213a3abc3ece272"}, + {file = "azure_storage_blob-12.27.1.tar.gz", hash = "sha256:a1596cc4daf5dac9be115fcb5db67245eae894cf40e4248243754261f7b674a6"}, ] [package.dependencies] @@ -334,19 +506,19 @@ aio = ["azure-core[aio] (>=1.30.0)"] [[package]] name = "azure-storage-file-datalake" -version = "12.20.0" +version = "12.22.0" description = "Microsoft Azure File DataLake Storage Client Library for Python" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "azure_storage_file_datalake-12.20.0-py3-none-any.whl", hash = "sha256:d32af627ccf7f8523ac4520a4004291b8ca340d7c607545366e1b08822afa01b"}, - {file = "azure_storage_file_datalake-12.20.0.tar.gz", hash = "sha256:8d6932b75470de935998755d980a3296412d8ff2a11631c1d99267ae65f11f03"}, + {file = "azure_storage_file_datalake-12.22.0-py3-none-any.whl", hash = "sha256:dba235d2fa21135205dbcbba884ea5f8a3aff800a8f89205a1a5a404843a1fc5"}, + {file = "azure_storage_file_datalake-12.22.0.tar.gz", hash = "sha256:9aed0d35f3327baeeb11b1950b140f97b356cfd368fc2cd105c32c820c49af77"}, ] [package.dependencies] azure-core = ">=1.30.0" -azure-storage-blob = ">=12.25.1" +azure-storage-blob = ">=12.27.0" isodate = ">=0.6.1" typing-extensions = ">=4.6.0" @@ -357,50 +529,76 @@ aio = ["azure-core[aio] (>=1.30.0)"] name = "babel" version = "2.17.0" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.8" +groups = 
["dev"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "bcrypt" -version = "4.2.1" +version = "4.3.0" description = "Modern password hashing for your software and your servers" -category = "main" optional = false -python-versions = ">=3.7" -files = [ - {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"}, - {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = 
"sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"}, - {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"}, - {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash = "sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"}, - {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"}, - {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"}, - {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"}, - {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"}, - {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"}, - {file = 
"bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"}, - {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"}, - {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"}, - {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"}, +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = 
"sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"}, + {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"}, + {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"}, + {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"}, + {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"}, + {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"}, + {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"}, + {file = 
"bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"}, + {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"}, ] [package.extras] @@ -409,14 +607,14 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.13.3" +version = "4.14.2" description = "Screen-scraping library" -category = "dev" optional = false python-versions = ">=3.7.0" +groups = ["dev"] files = [ - {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, - {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, + {file = "beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515"}, + {file = "beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e"}, ] [package.dependencies] @@ -434,9 +632,9 @@ lxml = ["lxml"] name = "black" version = "23.12.1" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, @@ -473,20 +671,20 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bleach" -version = "6.2.0" +version = "6.3.0" description = "An easy safelist-based HTML-sanitizing tool." -category = "dev" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, - {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, + {file = "bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6"}, + {file = "bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22"}, ] [package.dependencies] @@ -500,9 +698,9 @@ css = ["tinycss2 (>=1.1.0,<1.5)"] name = "blinker" version = "1.9.0" description = "Fast, simple object-to-object and broadcast signaling" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, {file = 
"blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, @@ -510,35 +708,35 @@ files = [ [[package]] name = "cachelib" -version = "0.9.0" +version = "0.13.0" description = "A collection of cache libraries in the same API interface." -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "cachelib-0.9.0-py3-none-any.whl", hash = "sha256:811ceeb1209d2fe51cd2b62810bd1eccf70feba5c52641532498be5c675493b3"}, - {file = "cachelib-0.9.0.tar.gz", hash = "sha256:38222cc7c1b79a23606de5c2607f4925779e37cdcea1c2ad21b8bae94b5425a5"}, + {file = "cachelib-0.13.0-py3-none-any.whl", hash = "sha256:8c8019e53b6302967d4e8329a504acf75e7bc46130291d30188a6e4e58162516"}, + {file = "cachelib-0.13.0.tar.gz", hash = "sha256:209d8996e3c57595bee274ff97116d1d73c4980b2fd9a34c7846cd07fd2e1a48"}, ] [[package]] name = "certifi" -version = "2025.1.31" +version = "2025.11.12" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +groups = ["main", "dev"] files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, + {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"}, + {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"}, ] [[package]] name = "cffi" version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -614,116 +812,137 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.4.1" +version = "3.4.4" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, - {file = 
"charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, - {file = 
"charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, - {file = 
"charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, - {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, - {file = 
"charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, - {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = 
"charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = 
"charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = 
"charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, ] [[package]] name = "click" -version = "8.1.8" +version = "8.3.0" description = "Composable command line 
interface toolkit" -category = "main" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["main", "dev"] files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, + {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"}, + {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"}, ] [package.dependencies] @@ -733,118 +952,145 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\""} [[package]] name = "comm" -version = "0.2.2" +version = "0.2.3" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, + {file = "comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417"}, + {file = "comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971"}, ] -[package.dependencies] -traitlets = ">=4" - [package.extras] test = ["pytest"] [[package]] name = "coverage" -version = "7.6.12" +version = "7.11.3" description = "Code coverage measurement for Python" -category = "dev" optional = false -python-versions = ">=3.9" -files = [ - {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, - {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, - {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, - {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, - {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, - {file = 
"coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, - {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, - {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, - {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, - {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, - {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, - {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, - {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, - {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, - {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, - {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, - {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "coverage-7.11.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0c986537abca9b064510f3fd104ba33e98d3036608c7f2f5537f869bc10e1ee5"}, + {file = "coverage-7.11.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:28c5251b3ab1d23e66f1130ca0c419747edfbcb4690de19467cd616861507af7"}, + {file = "coverage-7.11.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4f2bb4ee8dd40f9b2a80bb4adb2aecece9480ba1fa60d9382e8c8e0bd558e2eb"}, + {file = "coverage-7.11.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e5f4bfac975a2138215a38bda599ef00162e4143541cf7dd186da10a7f8e69f1"}, + {file = "coverage-7.11.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f4cbfff5cf01fa07464439a8510affc9df281535f41a1f5312fbd2b59b4ab5c"}, + {file = 
"coverage-7.11.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:31663572f20bf3406d7ac00d6981c7bbbcec302539d26b5ac596ca499664de31"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9799bd6a910961cb666196b8583ed0ee125fa225c6fdee2cbf00232b861f29d2"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:097acc18bedf2c6e3144eaf09b5f6034926c3c9bb9e10574ffd0942717232507"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:6f033dec603eea88204589175782290a038b436105a8f3637a81c4359df27832"}, + {file = "coverage-7.11.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dd9ca2d44ed8018c90efb72f237a2a140325a4c3339971364d758e78b175f58e"}, + {file = "coverage-7.11.3-cp310-cp310-win32.whl", hash = "sha256:900580bc99c145e2561ea91a2d207e639171870d8a18756eb57db944a017d4bb"}, + {file = "coverage-7.11.3-cp310-cp310-win_amd64.whl", hash = "sha256:c8be5bfcdc7832011b2652db29ed7672ce9d353dd19bce5272ca33dbcf60aaa8"}, + {file = "coverage-7.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:200bb89fd2a8a07780eafcdff6463104dec459f3c838d980455cfa84f5e5e6e1"}, + {file = "coverage-7.11.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8d264402fc179776d43e557e1ca4a7d953020d3ee95f7ec19cc2c9d769277f06"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:385977d94fc155f8731c895accdfcc3dd0d9dd9ef90d102969df95d3c637ab80"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0542ddf6107adbd2592f29da9f59f5d9cff7947b5bb4f734805085c327dcffaa"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d60bf4d7f886989ddf80e121a7f4d140d9eac91f1d2385ce8eb6bda93d563297"}, + {file = "coverage-7.11.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c0a3b6e32457535df0d41d2d895da46434706dd85dbaf53fbc0d3bd7d914b362"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:876a3ee7fd2613eb79602e4cdb39deb6b28c186e76124c3f29e580099ec21a87"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a730cd0824e8083989f304e97b3f884189efb48e2151e07f57e9e138ab104200"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:b5cd111d3ab7390be0c07ad839235d5ad54d2ca497b5f5db86896098a77180a4"}, + {file = "coverage-7.11.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:074e6a5cd38e06671580b4d872c1a67955d4e69639e4b04e87fc03b494c1f060"}, + {file = "coverage-7.11.3-cp311-cp311-win32.whl", hash = "sha256:86d27d2dd7c7c5a44710565933c7dc9cd70e65ef97142e260d16d555667deef7"}, + {file = "coverage-7.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:ca90ef33a152205fb6f2f0c1f3e55c50df4ef049bb0940ebba666edd4cdebc55"}, + {file = "coverage-7.11.3-cp311-cp311-win_arm64.whl", hash = "sha256:56f909a40d68947ef726ce6a34eb38f0ed241ffbe55c5007c64e616663bcbafc"}, + {file = "coverage-7.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b771b59ac0dfb7f139f70c85b42717ef400a6790abb6475ebac1ecee8de782f"}, + {file = "coverage-7.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:603c4414125fc9ae9000f17912dcfd3d3eb677d4e360b85206539240c96ea76e"}, + {file = "coverage-7.11.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:77ffb3b7704eb7b9b3298a01fe4509cef70117a52d50bcba29cffc5f53dd326a"}, + {file = "coverage-7.11.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4d4ca49f5ba432b0755ebb0fc3a56be944a19a16bb33802264bbc7311622c0d1"}, + {file = "coverage-7.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:05fd3fb6edff0c98874d752013588836f458261e5eba587afe4c547bba544afd"}, + {file = 
"coverage-7.11.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0e920567f8c3a3ce68ae5a42cf7c2dc4bb6cc389f18bff2235dd8c03fa405de5"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4bec8c7160688bd5a34e65c82984b25409563134d63285d8943d0599efbc448e"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:adb9b7b42c802bd8cb3927de8c1c26368ce50c8fdaa83a9d8551384d77537044"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:c8f563b245b4ddb591e99f28e3cd140b85f114b38b7f95b2e42542f0603eb7d7"}, + {file = "coverage-7.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e2a96fdc7643c9517a317553aca13b5cae9bad9a5f32f4654ce247ae4d321405"}, + {file = "coverage-7.11.3-cp312-cp312-win32.whl", hash = "sha256:e8feeb5e8705835f0622af0fe7ff8d5cb388948454647086494d6c41ec142c2e"}, + {file = "coverage-7.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:abb903ffe46bd319d99979cdba350ae7016759bb69f47882242f7b93f3356055"}, + {file = "coverage-7.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:1451464fd855d9bd000c19b71bb7dafea9ab815741fb0bd9e813d9b671462d6f"}, + {file = "coverage-7.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84b892e968164b7a0498ddc5746cdf4e985700b902128421bb5cec1080a6ee36"}, + {file = "coverage-7.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f761dbcf45e9416ec4698e1a7649248005f0064ce3523a47402d1bff4af2779e"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1410bac9e98afd9623f53876fae7d8a5db9f5a0ac1c9e7c5188463cb4b3212e2"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:004cdcea3457c0ea3233622cd3464c1e32ebba9b41578421097402bee6461b63"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:8f067ada2c333609b52835ca4d4868645d3b63ac04fb2b9a658c55bba7f667d3"}, + {file = "coverage-7.11.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:07bc7745c945a6d95676953e86ba7cebb9f11de7773951c387f4c07dc76d03f5"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8bba7e4743e37484ae17d5c3b8eb1ce78b564cb91b7ace2e2182b25f0f764cb5"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbffc22d80d86fbe456af9abb17f7a7766e7b2101f7edaacc3535501691563f7"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0dba4da36730e384669e05b765a2c49f39514dd3012fcc0398dd66fba8d746d5"}, + {file = "coverage-7.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ae12fe90b00b71a71b69f513773310782ce01d5f58d2ceb2b7c595ab9d222094"}, + {file = "coverage-7.11.3-cp313-cp313-win32.whl", hash = "sha256:12d821de7408292530b0d241468b698bce18dd12ecaf45316149f53877885f8c"}, + {file = "coverage-7.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:6bb599052a974bb6cedfa114f9778fedfad66854107cf81397ec87cb9b8fbcf2"}, + {file = "coverage-7.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:bb9d7efdb063903b3fdf77caec7b77c3066885068bdc0d44bc1b0c171033f944"}, + {file = "coverage-7.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:fb58da65e3339b3dbe266b607bb936efb983d86b00b03eb04c4ad5b442c58428"}, + {file = "coverage-7.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8d16bbe566e16a71d123cd66382c1315fcd520c7573652a8074a8fe281b38c6a"}, + {file = "coverage-7.11.3-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8258f10059b5ac837232c589a350a2df4a96406d6d5f2a09ec587cbdd539655"}, + {file = "coverage-7.11.3-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4c5627429f7fbff4f4131cfdd6abd530734ef7761116811a707b88b7e205afd7"}, + {file = 
"coverage-7.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:465695268414e149bab754c54b0c45c8ceda73dd4a5c3ba255500da13984b16d"}, + {file = "coverage-7.11.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4ebcddfcdfb4c614233cff6e9a3967a09484114a8b2e4f2c7a62dc83676ba13f"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13b2066303a1c1833c654d2af0455bb009b6e1727b3883c9964bc5c2f643c1d0"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d8750dd20362a1b80e3cf84f58013d4672f89663aee457ea59336df50fab6739"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ab6212e62ea0e1006531a2234e209607f360d98d18d532c2fa8e403c1afbdd71"}, + {file = "coverage-7.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a6b17c2b5e0b9bb7702449200f93e2d04cb04b1414c41424c08aa1e5d352da76"}, + {file = "coverage-7.11.3-cp313-cp313t-win32.whl", hash = "sha256:426559f105f644b69290ea414e154a0d320c3ad8a2bb75e62884731f69cf8e2c"}, + {file = "coverage-7.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:90a96fcd824564eae6137ec2563bd061d49a32944858d4bdbae5c00fb10e76ac"}, + {file = "coverage-7.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:1e33d0bebf895c7a0905fcfaff2b07ab900885fc78bba2a12291a2cfbab014cc"}, + {file = "coverage-7.11.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fdc5255eb4815babcdf236fa1a806ccb546724c8a9b129fd1ea4a5448a0bf07c"}, + {file = "coverage-7.11.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fe3425dc6021f906c6325d3c415e048e7cdb955505a94f1eb774dafc779ba203"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4ca5f876bf41b24378ee67c41d688155f0e54cdc720de8ef9ad6544005899240"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:9061a3e3c92b27fd8036dafa26f25d95695b6aa2e4514ab16a254f297e664f83"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:abcea3b5f0dc44e1d01c27090bc32ce6ffb7aa665f884f1890710454113ea902"}, + {file = "coverage-7.11.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:68c4eb92997dbaaf839ea13527be463178ac0ddd37a7ac636b8bc11a51af2428"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:149eccc85d48c8f06547534068c41d69a1a35322deaa4d69ba1561e2e9127e75"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:08c0bcf932e47795c49f0406054824b9d45671362dfc4269e0bc6e4bff010704"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:39764c6167c82d68a2d8c97c33dba45ec0ad9172570860e12191416f4f8e6e1b"}, + {file = "coverage-7.11.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3224c7baf34e923ffc78cb45e793925539d640d42c96646db62dbd61bbcfa131"}, + {file = "coverage-7.11.3-cp314-cp314-win32.whl", hash = "sha256:c713c1c528284d636cd37723b0b4c35c11190da6f932794e145fc40f8210a14a"}, + {file = "coverage-7.11.3-cp314-cp314-win_amd64.whl", hash = "sha256:c381a252317f63ca0179d2c7918e83b99a4ff3101e1b24849b999a00f9cd4f86"}, + {file = "coverage-7.11.3-cp314-cp314-win_arm64.whl", hash = "sha256:3e33a968672be1394eded257ec10d4acbb9af2ae263ba05a99ff901bb863557e"}, + {file = "coverage-7.11.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f9c96a29c6d65bd36a91f5634fef800212dff69dacdb44345c4c9783943ab0df"}, + {file = "coverage-7.11.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2ec27a7a991d229213c8070d31e3ecf44d005d96a9edc30c78eaeafaa421c001"}, + {file = "coverage-7.11.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:72c8b494bd20ae1c58528b97c4a67d5cfeafcb3845c73542875ecd43924296de"}, + {file = 
"coverage-7.11.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:60ca149a446da255d56c2a7a813b51a80d9497a62250532598d249b3cdb1a926"}, + {file = "coverage-7.11.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb5069074db19a534de3859c43eec78e962d6d119f637c41c8e028c5ab3f59dd"}, + {file = "coverage-7.11.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac5d5329c9c942bbe6295f4251b135d860ed9f86acd912d418dce186de7c19ac"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e22539b676fafba17f0a90ac725f029a309eb6e483f364c86dcadee060429d46"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:2376e8a9c889016f25472c452389e98bc6e54a19570b107e27cde9d47f387b64"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:4234914b8c67238a3c4af2bba648dc716aa029ca44d01f3d51536d44ac16854f"}, + {file = "coverage-7.11.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f0b4101e2b3c6c352ff1f70b3a6fcc7c17c1ab1a91ccb7a33013cb0782af9820"}, + {file = "coverage-7.11.3-cp314-cp314t-win32.whl", hash = "sha256:305716afb19133762e8cf62745c46c4853ad6f9eeba54a593e373289e24ea237"}, + {file = "coverage-7.11.3-cp314-cp314t-win_amd64.whl", hash = "sha256:9245bd392572b9f799261c4c9e7216bafc9405537d0f4ce3ad93afe081a12dc9"}, + {file = "coverage-7.11.3-cp314-cp314t-win_arm64.whl", hash = "sha256:9a1d577c20b4334e5e814c3d5fe07fa4a8c3ae42a601945e8d7940bab811d0bd"}, + {file = "coverage-7.11.3-py3-none-any.whl", hash = "sha256:351511ae28e2509c8d8cae5311577ea7dd511ab8e746ffc8814a0896c3d33fbe"}, + {file = "coverage-7.11.3.tar.gz", hash = "sha256:0f59387f5e6edbbffec2281affb71cdc85e0776c1745150a3ab9b6c1d016106b"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = 
["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "coveragespace" version = "6.1" description = "A place to track your code coverage metrics." -category = "dev" optional = false python-versions = "<4.0,>=3.8" +groups = ["dev"] files = [ {file = "coveragespace-6.1-py3-none-any.whl", hash = "sha256:ca6ccd5eb32eb6ce5fe78de6c052353b9fbb378a886fde0838480defe33406a8"}, {file = "coveragespace-6.1.tar.gz", hash = "sha256:049c0b7b629ad43d72692f0f99b9f8a97936ad596f7f27c1af61323fba90ebef"}, @@ -861,9 +1107,9 @@ requests = ">=2.28,<3.0" name = "cryptography" version = "3.4.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "cryptography-3.4.8-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14"}, {file = "cryptography-3.4.8-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7"}, @@ -899,59 +1145,63 @@ test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.0)" [[package]] name = "debugpy" -version = "1.8.12" +version = "1.8.17" description = "An implementation of the Debug Adapter Protocol for Python" -category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.12-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:a2ba7ffe58efeae5b8fad1165357edfe01464f9aef25e814e891ec690e7dd82a"}, - {file = "debugpy-1.8.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbbd4149c4fc5e7d508ece083e78c17442ee13b0e69bfa6bd63003e486770f45"}, - {file = "debugpy-1.8.12-cp310-cp310-win32.whl", hash = "sha256:b202f591204023b3ce62ff9a47baa555dc00bb092219abf5caf0e3718ac20e7c"}, - {file = "debugpy-1.8.12-cp310-cp310-win_amd64.whl", hash = 
"sha256:9649eced17a98ce816756ce50433b2dd85dfa7bc92ceb60579d68c053f98dff9"}, - {file = "debugpy-1.8.12-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:36f4829839ef0afdfdd208bb54f4c3d0eea86106d719811681a8627ae2e53dd5"}, - {file = "debugpy-1.8.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a28ed481d530e3138553be60991d2d61103ce6da254e51547b79549675f539b7"}, - {file = "debugpy-1.8.12-cp311-cp311-win32.whl", hash = "sha256:4ad9a94d8f5c9b954e0e3b137cc64ef3f579d0df3c3698fe9c3734ee397e4abb"}, - {file = "debugpy-1.8.12-cp311-cp311-win_amd64.whl", hash = "sha256:4703575b78dd697b294f8c65588dc86874ed787b7348c65da70cfc885efdf1e1"}, - {file = "debugpy-1.8.12-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:7e94b643b19e8feb5215fa508aee531387494bf668b2eca27fa769ea11d9f498"}, - {file = "debugpy-1.8.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086b32e233e89a2740c1615c2f775c34ae951508b28b308681dbbb87bba97d06"}, - {file = "debugpy-1.8.12-cp312-cp312-win32.whl", hash = "sha256:2ae5df899732a6051b49ea2632a9ea67f929604fd2b036613a9f12bc3163b92d"}, - {file = "debugpy-1.8.12-cp312-cp312-win_amd64.whl", hash = "sha256:39dfbb6fa09f12fae32639e3286112fc35ae976114f1f3d37375f3130a820969"}, - {file = "debugpy-1.8.12-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:696d8ae4dff4cbd06bf6b10d671e088b66669f110c7c4e18a44c43cf75ce966f"}, - {file = "debugpy-1.8.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:898fba72b81a654e74412a67c7e0a81e89723cfe2a3ea6fcd3feaa3395138ca9"}, - {file = "debugpy-1.8.12-cp313-cp313-win32.whl", hash = "sha256:22a11c493c70413a01ed03f01c3c3a2fc4478fc6ee186e340487b2edcd6f4180"}, - {file = "debugpy-1.8.12-cp313-cp313-win_amd64.whl", hash = "sha256:fdb3c6d342825ea10b90e43d7f20f01535a72b3a1997850c0c3cefa5c27a4a2c"}, - {file = 
"debugpy-1.8.12-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:b0232cd42506d0c94f9328aaf0d1d0785f90f87ae72d9759df7e5051be039738"}, - {file = "debugpy-1.8.12-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9af40506a59450f1315168d47a970db1a65aaab5df3833ac389d2899a5d63b3f"}, - {file = "debugpy-1.8.12-cp38-cp38-win32.whl", hash = "sha256:5cc45235fefac57f52680902b7d197fb2f3650112379a6fa9aa1b1c1d3ed3f02"}, - {file = "debugpy-1.8.12-cp38-cp38-win_amd64.whl", hash = "sha256:557cc55b51ab2f3371e238804ffc8510b6ef087673303890f57a24195d096e61"}, - {file = "debugpy-1.8.12-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:b5c6c967d02fee30e157ab5227706f965d5c37679c687b1e7bbc5d9e7128bd41"}, - {file = "debugpy-1.8.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a77f422f31f170c4b7e9ca58eae2a6c8e04da54121900651dfa8e66c29901a"}, - {file = "debugpy-1.8.12-cp39-cp39-win32.whl", hash = "sha256:a4042edef80364239f5b7b5764e55fd3ffd40c32cf6753da9bda4ff0ac466018"}, - {file = "debugpy-1.8.12-cp39-cp39-win_amd64.whl", hash = "sha256:f30b03b0f27608a0b26c75f0bb8a880c752c0e0b01090551b9d87c7d783e2069"}, - {file = "debugpy-1.8.12-py2.py3-none-any.whl", hash = "sha256:274b6a2040349b5c9864e475284bce5bb062e63dce368a394b8cc865ae3b00c6"}, - {file = "debugpy-1.8.12.tar.gz", hash = "sha256:646530b04f45c830ceae8e491ca1c9320a2d2f0efea3141487c82130aba70dce"}, +groups = ["dev"] +files = [ + {file = "debugpy-1.8.17-cp310-cp310-macosx_15_0_x86_64.whl", hash = "sha256:c41d2ce8bbaddcc0009cc73f65318eedfa3dbc88a8298081deb05389f1ab5542"}, + {file = "debugpy-1.8.17-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:1440fd514e1b815edd5861ca394786f90eb24960eb26d6f7200994333b1d79e3"}, + {file = "debugpy-1.8.17-cp310-cp310-win32.whl", hash = "sha256:3a32c0af575749083d7492dc79f6ab69f21b2d2ad4cd977a958a07d5865316e4"}, + {file = "debugpy-1.8.17-cp310-cp310-win_amd64.whl", hash = 
"sha256:a3aad0537cf4d9c1996434be68c6c9a6d233ac6f76c2a482c7803295b4e4f99a"}, + {file = "debugpy-1.8.17-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:d3fce3f0e3de262a3b67e69916d001f3e767661c6e1ee42553009d445d1cd840"}, + {file = "debugpy-1.8.17-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:c6bdf134457ae0cac6fb68205776be635d31174eeac9541e1d0c062165c6461f"}, + {file = "debugpy-1.8.17-cp311-cp311-win32.whl", hash = "sha256:e79a195f9e059edfe5d8bf6f3749b2599452d3e9380484cd261f6b7cd2c7c4da"}, + {file = "debugpy-1.8.17-cp311-cp311-win_amd64.whl", hash = "sha256:b532282ad4eca958b1b2d7dbcb2b7218e02cb934165859b918e3b6ba7772d3f4"}, + {file = "debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d"}, + {file = "debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc"}, + {file = "debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf"}, + {file = "debugpy-1.8.17-cp312-cp312-win_amd64.whl", hash = "sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464"}, + {file = "debugpy-1.8.17-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:857c1dd5d70042502aef1c6d1c2801211f3ea7e56f75e9c335f434afb403e464"}, + {file = "debugpy-1.8.17-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088"}, + {file = "debugpy-1.8.17-cp313-cp313-win32.whl", hash = "sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83"}, + {file = "debugpy-1.8.17-cp313-cp313-win_amd64.whl", hash = "sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420"}, + {file = "debugpy-1.8.17-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:045290c010bcd2d82bc97aa2daf6837443cd52f6328592698809b4549babcee1"}, + {file = "debugpy-1.8.17-cp314-cp314-manylinux_2_34_x86_64.whl", 
hash = "sha256:b69b6bd9dba6a03632534cdf67c760625760a215ae289f7489a452af1031fe1f"}, + {file = "debugpy-1.8.17-cp314-cp314-win32.whl", hash = "sha256:5c59b74aa5630f3a5194467100c3b3d1c77898f9ab27e3f7dc5d40fc2f122670"}, + {file = "debugpy-1.8.17-cp314-cp314-win_amd64.whl", hash = "sha256:893cba7bb0f55161de4365584b025f7064e1f88913551bcd23be3260b231429c"}, + {file = "debugpy-1.8.17-cp38-cp38-macosx_15_0_x86_64.whl", hash = "sha256:8deb4e31cd575c9f9370042876e078ca118117c1b5e1f22c32befcfbb6955f0c"}, + {file = "debugpy-1.8.17-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:b75868b675949a96ab51abc114c7163f40ff0d8f7d6d5fd63f8932fd38e9c6d7"}, + {file = "debugpy-1.8.17-cp38-cp38-win32.whl", hash = "sha256:17e456da14848d618662354e1dccfd5e5fb75deec3d1d48dc0aa0baacda55860"}, + {file = "debugpy-1.8.17-cp38-cp38-win_amd64.whl", hash = "sha256:e851beb536a427b5df8aa7d0c7835b29a13812f41e46292ff80b2ef77327355a"}, + {file = "debugpy-1.8.17-cp39-cp39-macosx_15_0_x86_64.whl", hash = "sha256:f2ac8055a0c4a09b30b931100996ba49ef334c6947e7ae365cdd870416d7513e"}, + {file = "debugpy-1.8.17-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:eaa85bce251feca8e4c87ce3b954aba84b8c645b90f0e6a515c00394a9f5c0e7"}, + {file = "debugpy-1.8.17-cp39-cp39-win32.whl", hash = "sha256:b13eea5587e44f27f6c48588b5ad56dcb74a4f3a5f89250443c94587f3eb2ea1"}, + {file = "debugpy-1.8.17-cp39-cp39-win_amd64.whl", hash = "sha256:bb1bbf92317e1f35afcf3ef0450219efb3afe00be79d8664b250ac0933b9015f"}, + {file = "debugpy-1.8.17-py2.py3-none-any.whl", hash = "sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef"}, + {file = "debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e"}, ] [[package]] name = "decorator" -version = "5.1.1" +version = "5.2.1" description = "Decorators for Humans" -category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "decorator-5.1.1-py3-none-any.whl", 
hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, ] [[package]] name = "defusedxml" version = "0.7.1" description = "XML bomb protection for Python stdlib modules" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -961,9 +1211,9 @@ files = [ name = "dicttoxml" version = "1.7.16" description = "Converts a Python dictionary or other native data type into a valid XML string." 
-category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "dicttoxml-1.7.16-py3-none-any.whl", hash = "sha256:8677671496d0d38e66c7179f82a7e9059f94887777955dc71b0ac602ee637c26"}, {file = "dicttoxml-1.7.16.tar.gz", hash = "sha256:6f36ce644881db5cd8940bee9b7cb3f3f6b7b327ba8a67d83d3e2caa0538bf9d"}, @@ -971,14 +1221,14 @@ files = [ [[package]] name = "dill" -version = "0.3.9" +version = "0.4.0" description = "serialize all of Python" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, - {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, + {file = "dill-0.4.0-py3-none-any.whl", hash = "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049"}, + {file = "dill-0.4.0.tar.gz", hash = "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0"}, ] [package.extras] @@ -987,46 +1237,46 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "dnspython" -version = "2.7.0" +version = "2.8.0" description = "DNS toolkit" -category = "main" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, - {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, + {file = "dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af"}, + {file = "dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", 
"pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=43)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=1.0.0)"] -idna = ["idna (>=3.7)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] +dev = ["black (>=25.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.17.0)", "mypy (>=1.17)", "pylint (>=3)", "pytest (>=8.4)", "pytest-cov (>=6.2.0)", "quart-trio (>=0.12.0)", "sphinx (>=8.2.0)", "sphinx-rtd-theme (>=3.0.0)", "twine (>=6.1.0)", "wheel (>=0.45.0)"] +dnssec = ["cryptography (>=45)"] +doh = ["h2 (>=4.2.0)", "httpcore (>=1.0.0)", "httpx (>=0.28.0)"] +doq = ["aioquic (>=1.2.0)"] +idna = ["idna (>=3.10)"] +trio = ["trio (>=0.30)"] +wmi = ["wmi (>=1.5.1) ; platform_system == \"Windows\""] [[package]] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] [[package]] name = "email-validator" -version = "2.2.0" +version = "2.3.0" description = "A robust email address syntax and deliverability validation library." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, - {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, + {file = "email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4"}, + {file = "email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426"}, ] [package.dependencies] @@ -1035,41 +1285,45 @@ idna = ">=2.0.0" [[package]] name = "exceptiongroup" -version = "1.2.2" +version = "1.3.0" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version == \"3.10\"" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.2.0" +version = "2.2.1" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, - {file = 
"executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, + {file = "executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017"}, + {file = "executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "faker" version = "18.13.0" description = "Faker is a Python package that generates fake data for you." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "Faker-18.13.0-py3-none-any.whl", hash = "sha256:801d1a2d71f1fc54d332de2ab19de7452454309937233ea2f7485402882d67b3"}, {file = "Faker-18.13.0.tar.gz", hash = "sha256:84bcf92bb725dd7341336eea4685df9a364f16f2470c4d29c1d7e6c5fd5a457d"}, @@ -1080,14 +1334,14 @@ python-dateutil = ">=2.4" [[package]] name = "fastjsonschema" -version = "2.21.1" +version = "2.21.2" description = "Fastest Python implementation of JSON schema" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ - {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, - {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, + {file = "fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463"}, + {file = "fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de"}, ] [package.extras] @@ -1097,9 +1351,9 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", 
"pytest", "pytest-benc name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" +groups = ["dev"] files = [ {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, @@ -1114,9 +1368,9 @@ pyflakes = ">=3.1.0,<3.2.0" name = "flask" version = "2.3.3" description = "A simple framework for building complex web applications." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, @@ -1137,9 +1391,9 @@ dotenv = ["python-dotenv"] name = "flask-bcrypt" version = "1.0.1" description = "Brcrypt hashing for Flask." -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, @@ -1151,27 +1405,27 @@ Flask = "*" [[package]] name = "flask-caching" -version = "2.3.0" +version = "2.3.1" description = "Adds caching support to Flask applications." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Flask_Caching-2.3.0-py3-none-any.whl", hash = "sha256:51771c75682e5abc1483b78b96d9131d7941dc669b073852edfa319dd4e29b6e"}, - {file = "flask_caching-2.3.0.tar.gz", hash = "sha256:d7e4ca64a33b49feb339fcdd17e6ba25f5e01168cf885e53790e885f83a4d2cf"}, + {file = "Flask_Caching-2.3.1-py3-none-any.whl", hash = "sha256:d3efcf600e5925ea5a2fcb810f13b341ae984f5b52c00e9d9070392f3ca10761"}, + {file = "flask_caching-2.3.1.tar.gz", hash = "sha256:65d7fd1b4eebf810f844de7de6258254b3248296ee429bdcb3f741bcbf7b98c9"}, ] [package.dependencies] -cachelib = ">=0.9.0,<0.10.0" +cachelib = ">=0.9.0" Flask = "*" [[package]] name = "flask-cors" version = "4.0.2" description = "A Flask extension adding a decorator for CORS support" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask_Cors-4.0.2-py2.py3-none-any.whl", hash = "sha256:38364faf1a7a5d0a55bd1d2e2f83ee9e359039182f5e6a029557e1f56d92c09a"}, {file = "flask_cors-4.0.2.tar.gz", hash = "sha256:493b98e2d1e2f1a4720a7af25693ef2fe32fbafec09a2f72c59f3e475eda61d2"}, @@ -1184,9 +1438,9 @@ Flask = ">=0.9" name = "flask-mail" version = "0.9.1" description = "Flask extension for sending email" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, ] @@ -1199,9 +1453,9 @@ Flask = "*" name = "flask-mailman" version = "1.1.1" description = "Porting Django's email implementation to your Flask applications." 
-category = "main" optional = false python-versions = "<4.0,>=3.7" +groups = ["main"] files = [ {file = "flask_mailman-1.1.1-py3-none-any.whl", hash = "sha256:0a66ead606b2ec9e4371d727f82709c7a51270bc5306be57c9f4ce0ed29dbe57"}, {file = "flask_mailman-1.1.1.tar.gz", hash = "sha256:3bc1ffffbd655ba9e468946a5f02e9cc772594fe1e98ace636c2f6717419eefa"}, @@ -1217,14 +1471,14 @@ test = ["aiosmtpd (>=1.4.4.post2,<2.0.0)", "black", "flake8", "isort", "pytest", [[package]] name = "flask-restx" -version = "1.3.0" +version = "1.3.2" description = "Fully featured framework for fast, easy and documented API development with Flask" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "flask-restx-1.3.0.tar.gz", hash = "sha256:4f3d3fa7b6191fcc715b18c201a12cd875176f92ba4acc61626ccfd571ee1728"}, - {file = "flask_restx-1.3.0-py2.py3-none-any.whl", hash = "sha256:636c56c3fb3f2c1df979e748019f084a938c4da2035a3e535a4673e4fc177691"}, + {file = "flask-restx-1.3.2.tar.gz", hash = "sha256:0ae13d77e7d7e4dce513970cfa9db45364aef210e99022de26d2b73eb4dbced5"}, + {file = "flask_restx-1.3.2-py2.py3-none-any.whl", hash = "sha256:6e035496e8223668044fc45bf769e526352fd648d9e159bd631d94fd645a687b"}, ] [package.dependencies] @@ -1232,21 +1486,21 @@ aniso8601 = ">=0.82" Flask = ">=0.8,<2.0.0 || >2.0.0" importlib-resources = "*" jsonschema = "*" -pytz = "*" +referencing = "*" werkzeug = "!=2.0.0" [package.extras] -dev = ["Faker (==2.0.0)", "black", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "tox", "twine (==3.8.0)", "tzlocal"] +dev = ["Faker (==2.0.0)", "backports.zoneinfo ; python_version < \"3.9\"", "black", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", 
"pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "tox", "twine (==3.8.0)"] doc = ["Sphinx (==5.3.0)", "alabaster (==0.7.12)", "sphinx-issues (==3.0.1)"] -test = ["Faker (==2.0.0)", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "twine (==3.8.0)", "tzlocal"] +test = ["Faker (==2.0.0)", "backports.zoneinfo ; python_version < \"3.9\"", "blinker", "invoke (==2.2.0)", "mock (==3.0.5)", "pytest (==7.0.1)", "pytest-benchmark (==3.4.1)", "pytest-cov (==4.0.0)", "pytest-flask (==1.3.0)", "pytest-mock (==3.6.1)", "pytest-profiling (==1.7.0)", "setuptools", "twine (==3.8.0)"] [[package]] name = "flask-sqlalchemy" version = "3.1.1" description = "Add SQLAlchemy support to your Flask application." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"}, {file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"}, @@ -1260,9 +1514,9 @@ sqlalchemy = ">=2.0.16" name = "fqdn" version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" -category = "dev" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +groups = ["dev"] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, @@ -1270,26 +1524,166 @@ files = [ [[package]] name = "freezegun" -version = "1.5.1" +version = "1.5.5" description = "Let your Python tests travel through time" -category = "dev" optional = false 
-python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, - {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, + {file = "freezegun-1.5.5-py3-none-any.whl", hash = "sha256:cd557f4a75cf074e84bc374249b9dd491eaeacd61376b9eb3c423282211619d2"}, + {file = "freezegun-1.5.5.tar.gz", hash = "sha256:ac7742a6cc6c25a2c35e9292dfd554b897b517d2dec26891a2e8debf205cb94a"}, ] [package.dependencies] python-dateutil = ">=2.7" +[[package]] +name = "frozenlist" +version = "1.8.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7"}, + {file = "frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967"}, + {file = "frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143"}, + {file = 
"frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa"}, + {file = "frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5"}, + {file = 
"frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed"}, + {file = "frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51"}, + {file = 
"frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042"}, + {file = 
"frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = 
"sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7"}, + {file = "frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda"}, + {file = "frozenlist-1.8.0-cp39-cp39-win32.whl", hash = "sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103"}, + {file = "frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d"}, + {file = "frozenlist-1.8.0.tar.gz", hash = 
"sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad"}, +] + [[package]] name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, @@ -1303,149 +1697,144 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "greenlet" -version = "3.1.1" +version = "3.2.4" description = "Lightweight in-process concurrent programming" -category = "main" optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = 
"greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, - {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, - {file = 
"greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"" +files = [ + {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31"}, + {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7"}, + {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8"}, + {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, + {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079"}, + {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = 
"greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5"}, + {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, + {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6"}, + {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d"}, + {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, + {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504"}, + {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929"}, + {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, + {file = 
"greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, + {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681"}, + {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, + {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", 
hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433"}, + {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be"}, + {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, + {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, + {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, ] [package.extras] docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] +test = ["objgraph", "psutil", "setuptools"] [[package]] name = "growthbook" -version = "1.1.0" +version = "1.4.7" description = "Powerful Feature flagging and A/B testing for Python apps" -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "growthbook-1.1.0-py2.py3-none-any.whl", hash = "sha256:4e74345d81172ea58d0a5e348218fbb4c6a2f218144ee139efbec444c18a6fc2"}, - {file = "growthbook-1.1.0.tar.gz", hash = 
"sha256:56cc27df0f241491c27efe18b510833bb8d44e8004038ebc30db2bd5c598e879"}, + {file = "growthbook-1.4.7-py2.py3-none-any.whl", hash = "sha256:a9f2d0ee1fa48c3bcd3075f2ff94e238713cc972333585a87566907aa30aa322"}, + {file = "growthbook-1.4.7.tar.gz", hash = "sha256:2dab2b2b8aecabf4bbbfa8acf37804eeea8c55cd419d09c5e00648b2b728be13"}, ] [package.dependencies] +aiohttp = ">=3.6.0" cryptography = "*" -typing-extensions = "*" +typing_extensions = "*" urllib3 = "*" [[package]] name = "h11" -version = "0.14.0" +version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.9" description = "A minimal low-level HTTP client." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [package.dependencies] certifi = "*" -h11 = ">=0.13,<0.15" +h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -1454,26 +1843,26 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = ">=1.0.0,<2.0.0" +httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.10" +version = "3.11" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, ] [package.extras] @@ -1481,42 +1870,42 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "importlib-metadata" -version = "8.6.1" +version = "8.7.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, - {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, ] [package.dependencies] zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", 
"pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "importlib-resources" version = "6.5.2" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec"}, {file = "importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] @@ -1525,60 +1914,61 @@ type = ["pytest-mypy"] [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.3.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, + {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, ] [[package]] name = "ipykernel" -version = "6.29.5" +version = "7.1.0" description = "IPython Kernel for Jupyter" -category = "dev" optional = false 
-python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, + {file = "ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c"}, + {file = "ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db"}, ] [package.dependencies] -appnope = {version = "*", markers = "platform_system == \"Darwin\""} +appnope = {version = ">=0.1.2", markers = "platform_system == \"Darwin\""} comm = ">=0.1.1" debugpy = ">=1.6.5" ipython = ">=7.23.1" -jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-client = ">=8.0.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" matplotlib-inline = ">=0.1" -nest-asyncio = "*" -packaging = "*" -psutil = "*" -pyzmq = ">=24" -tornado = ">=6.1" +nest-asyncio = ">=1.4" +packaging = ">=22" +psutil = ">=5.7" +pyzmq = ">=25" +tornado = ">=6.2" traitlets = ">=5.4.0" [package.extras] -cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +cov = ["coverage[toml]", "matplotlib", "pytest-cov", "trio"] +docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx (<8.2.0)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] -test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0,<9)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" 
-version = "8.32.0" +version = "8.37.0" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.10" +groups = ["dev"] +markers = "python_version == \"3.10\"" files = [ - {file = "ipython-8.32.0-py3-none-any.whl", hash = "sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa"}, - {file = "ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251"}, + {file = "ipython-8.37.0-py3-none-any.whl", hash = "sha256:ed87326596b878932dbcb171e3e698845434d8c61b8d8cd474bf663041a9dcf2"}, + {file = "ipython-8.37.0.tar.gz", hash = "sha256:ca815841e1a41a1e6b73a0b08f3038af9b2252564d01fc405356d34033012216"}, ] [package.dependencies] @@ -1597,7 +1987,7 @@ typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] kernel = ["ipykernel"] matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] @@ -1606,26 +1996,76 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] +test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + 
+[[package]] +name = "ipython" +version = "9.7.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.11" +groups = ["dev"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f"}, + {file = "ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e"}, +] + +[package.dependencies] +colorama = {version = ">=0.4.4", markers = "sys_platform == \"win32\""} +decorator = ">=4.3.2" +ipython-pygments-lexers = ">=1.0.0" +jedi = ">=0.18.1" +matplotlib-inline = ">=0.1.5" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.11.0" +stack_data = ">=0.6.0" +traitlets = ">=5.13.0" +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[doc,matplotlib,test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[matplotlib,test]", "setuptools (>=70.0)", "sphinx (>=8.0)", "sphinx-rtd-theme (>=0.1.8)", "sphinx_toml (==0.0.4)", "typing_extensions"] +matplotlib = ["matplotlib (>3.9)"] +test = ["packaging (>=20.1.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=1.0.0)", "setuptools (>=61.2)", "testpath (>=0.2)"] +test-extra = ["curio", "ipykernel (>6.30)", "ipython[matplotlib]", "ipython[test]", "jupyter_ai", "nbclient", "nbformat", "numpy (>=1.27)", "pandas (>2.1)", "trio (>=0.1.0)"] + +[[package]] +name = "ipython-pygments-lexers" +version = "1.1.1" +description = "Defines a variety of Pygments lexers for highlighting IPython code." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, + {file = "ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81"}, +] + +[package.dependencies] +pygments = "*" [[package]] name = "ipywidgets" -version = "8.1.5" +version = "8.1.8" description = "Jupyter interactive widgets" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"}, - {file = "ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"}, + {file = "ipywidgets-8.1.8-py3-none-any.whl", hash = "sha256:ecaca67aed704a338f88f67b1181b58f821ab5dc89c1f0f5ef99db43c1c2921e"}, + {file = "ipywidgets-8.1.8.tar.gz", hash = "sha256:61f969306b95f85fba6b6986b7fe45d73124d1d9e3023a8068710d47a22ea668"}, ] [package.dependencies] comm = ">=0.1.3" ipython = ">=6.1.0" -jupyterlab-widgets = ">=3.0.12,<3.1.0" +jupyterlab_widgets = ">=3.0.15,<3.1.0" traitlets = ">=4.3.1" -widgetsnbextension = ">=4.0.12,<4.1.0" +widgetsnbextension = ">=4.0.14,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] @@ -1634,9 +2074,9 @@ test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] name = "isodate" version = "0.7.2" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, {file = "isodate-0.7.2.tar.gz", hash = 
"sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, @@ -1646,9 +2086,9 @@ files = [ name = "isoduration" version = "20.11.0" description = "Operations with ISO 8601 durations" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, @@ -1661,9 +2101,9 @@ arrow = ">=0.15.0" name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1676,9 +2116,9 @@ colors = ["colorama (>=0.4.6)"] name = "itsdangerous" version = "2.2.0" description = "Safely pass data to untrusted environments and back." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, @@ -1688,9 +2128,9 @@ files = [ name = "jedi" version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." 
-category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, @@ -1706,14 +2146,14 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] [[package]] name = "jinja2" -version = "3.1.5" +version = "3.1.6" description = "A very fast and expressive template engine." -category = "main" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1724,26 +2164,26 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "json5" -version = "0.10.0" +version = "0.12.1" description = "A Python implementation of the JSON5 data format." 
-category = "dev" optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ - {file = "json5-0.10.0-py3-none-any.whl", hash = "sha256:19b23410220a7271e8377f81ba8aacba2fdd56947fbb137ee5977cbe1f5e8dfa"}, - {file = "json5-0.10.0.tar.gz", hash = "sha256:e66941c8f0a02026943c52c2eb34ebeb2a6f819a0be05920a6f5243cd30fd559"}, + {file = "json5-0.12.1-py3-none-any.whl", hash = "sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5"}, + {file = "json5-0.12.1.tar.gz", hash = "sha256:b2743e77b3242f8d03c143dd975a6ec7c52e2f2afe76ed934e53503dd4ad4990"}, ] [package.extras] -dev = ["build (==1.2.2.post1)", "coverage (==7.5.3)", "mypy (==1.13.0)", "pip (==24.3.1)", "pylint (==3.2.3)", "ruff (==0.7.3)", "twine (==5.1.1)", "uv (==0.5.1)"] +dev = ["build (==1.2.2.post1)", "coverage (==7.5.4) ; python_version < \"3.9\"", "coverage (==7.8.0) ; python_version >= \"3.9\"", "mypy (==1.14.1) ; python_version < \"3.9\"", "mypy (==1.15.0) ; python_version >= \"3.9\"", "pip (==25.0.1)", "pylint (==3.2.7) ; python_version < \"3.9\"", "pylint (==3.3.6) ; python_version >= \"3.9\"", "ruff (==0.11.2)", "twine (==6.1.0)", "uv (==0.6.11)"] [[package]] name = "jsonpointer" version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -1751,14 +2191,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.23.0" +version = "4.25.1" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = 
"sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, + {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"}, + {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"}, ] [package.dependencies] @@ -1771,24 +2211,25 @@ jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +rfc3987-syntax = {version = ">=1.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "rfc3987-syntax (>=1.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" -version = "2024.10.1" +version = "2025.9.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = 
"jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, + {file = "jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe"}, + {file = "jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d"}, ] [package.dependencies] @@ -1798,9 +2239,9 @@ referencing = ">=0.31.0" name = "jupyter" version = "1.1.1" description = "Jupyter metapackage. Install all the Jupyter components in one go." -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83"}, {file = "jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a"}, @@ -1818,16 +2259,16 @@ notebook = "*" name = "jupyter-client" version = "8.6.3" description = "Jupyter protocol implementation and client libraries" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, ] [package.dependencies] -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" tornado = ">=6.2" @@ -1835,15 +2276,15 @@ traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +test = 
["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-console" version = "6.6.3" description = "Jupyter terminal console" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, @@ -1853,7 +2294,7 @@ files = [ ipykernel = ">=6.14" ipython = "*" jupyter-client = ">=7.0.0" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" prompt-toolkit = ">=3.0.30" pygments = "*" pyzmq = ">=17" @@ -1864,32 +2305,31 @@ test = ["flaky", "pexpect", "pytest"] [[package]] name = "jupyter-core" -version = "5.7.2" +version = "5.9.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
-category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, + {file = "jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407"}, + {file = "jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508"}, ] [package.dependencies] platformdirs = ">=2.5" -pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = ">=5.3" [package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] +docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<9)", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" version = "0.12.0" description = "Jupyter Event System library" -category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, {file = "jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, @@ -1912,29 +2352,29 @@ test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "p [[package]] name = "jupyter-lsp" -version = "2.2.5" +version = "2.3.0" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" -category = "dev" 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, - {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, + {file = "jupyter_lsp-2.3.0-py3-none-any.whl", hash = "sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f"}, + {file = "jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245"}, ] [package.dependencies] -jupyter-server = ">=1.1.2" +jupyter_server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.15.0" +version = "2.17.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." -category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "jupyter_server-2.15.0-py3-none-any.whl", hash = "sha256:872d989becf83517012ee669f09604aa4a28097c0bd90b2f424310156c2cdae3"}, - {file = "jupyter_server-2.15.0.tar.gz", hash = "sha256:9d446b8697b4f7337a1b7cdcac40778babdd93ba614b6d68ab1c0c918f1c4084"}, + {file = "jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f"}, + {file = "jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5"}, ] [package.dependencies] @@ -1942,12 +2382,12 @@ anyio = ">=3.1.0" argon2-cffi = ">=21.1" jinja2 = ">=3.0.3" jupyter-client = ">=7.4.4" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" jupyter-events = ">=0.11.0" jupyter-server-terminals = ">=0.4.4" nbconvert = ">=6.4.4" nbformat = ">=5.3.0" -overrides = ">=5.0" +overrides = {version = ">=5.0", markers = "python_version < \"3.12\""} packaging = ">=22.0" prometheus-client = ">=0.9" pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} @@ -1966,9 
+2406,9 @@ test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console name = "jupyter-server-terminals" version = "0.5.3" description = "A Jupyter Server Extension Providing Terminals." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, @@ -1984,20 +2424,20 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.3.5" +version = "4.4.10" description = "JupyterLab computational environment" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "jupyterlab-4.3.5-py3-none-any.whl", hash = "sha256:571bbdee20e4c5321ab5195bc41cf92a75a5cff886be5e57ce78dfa37a5e9fdb"}, - {file = "jupyterlab-4.3.5.tar.gz", hash = "sha256:c779bf72ced007d7d29d5bcef128e7fdda96ea69299e19b04a43635a7d641f9d"}, + {file = "jupyterlab-4.4.10-py3-none-any.whl", hash = "sha256:65939ab4c8dcd0c42185c2d0d1a9d60b254dc8c46fc4fdb286b63c51e9358e07"}, + {file = "jupyterlab-4.4.10.tar.gz", hash = "sha256:521c017508af4e1d6d9d8a9d90f47a11c61197ad63b2178342489de42540a615"}, ] [package.dependencies] async-lru = ">=1.0.0" -httpx = ">=0.25.0" -ipykernel = ">=6.5.0" +httpx = ">=0.25.0,<1" +ipykernel = ">=6.5.0,<6.30.0 || >6.30.0" jinja2 = ">=3.0.3" jupyter-core = "*" jupyter-lsp = ">=2.0.0" @@ -2005,15 +2445,15 @@ jupyter-server = ">=2.4.0,<3" jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2" packaging = "*" -setuptools = ">=40.8.0" +setuptools = ">=41.1.0" tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["build", "bump2version", "coverage", "hatch", 
"pre-commit", "pytest-cov", "ruff (==0.6.9)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<8.1.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.4.1)", "ipython (==8.16.1)", "ipywidgets (==8.1.5)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.2.post3)", "matplotlib (==3.9.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.3)", "scipy (==1.14.1)", "vega-datasets (==0.9.0)"] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.11.4)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<8.2.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.5.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.5)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.3.post1)", "matplotlib (==3.10.0)", "nbconvert (>=7.0.0)", "pandas (==2.2.3)", "scipy (==1.15.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] @@ -2021,9 +2461,9 @@ upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)" name = "jupyterlab-pygments" version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -2031,14 +2471,14 @@ files = [ [[package]] 
name = "jupyterlab-server" -version = "2.27.3" +version = "2.28.0" description = "A set of server components for JupyterLab and JupyterLab like applications." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, - {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, + {file = "jupyterlab_server-2.28.0-py3-none-any.whl", hash = "sha256:e4355b148fdcf34d312bbbc80f22467d6d20460e8b8736bf235577dd18506968"}, + {file = "jupyterlab_server-2.28.0.tar.gz", hash = "sha256:35baa81898b15f93573e2deca50d11ac0ae407ebb688299d3a5213265033712c"}, ] [package.dependencies] @@ -2057,73 +2497,98 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v [[package]] name = "jupyterlab-widgets" -version = "3.0.13" +version = "3.0.16" description = "Jupyter interactive widgets for JupyterLab" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, - {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, + {file = "jupyterlab_widgets-3.0.16-py3-none-any.whl", hash = "sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8"}, + {file = "jupyterlab_widgets-3.0.16.tar.gz", hash = "sha256:423da05071d55cf27a9e602216d35a3a65a3e41cdf9c5d3b643b814ce38c19e0"}, ] [[package]] -name = "lazy-object-proxy" -version = "1.10.0" -description = "A fast and thorough lazy object proxy." 
-category = "dev" +name = "lark" +version = "1.3.1" +description = "a modern parsing library" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "lazy-object-proxy-1.10.0.tar.gz", hash = "sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win32.whl", hash = "sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9"}, - {file = "lazy_object_proxy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win32.whl", hash = "sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03"}, - {file = "lazy_object_proxy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win32.whl", hash = "sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074"}, - {file = "lazy_object_proxy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win32.whl", hash = "sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4"}, - {file = "lazy_object_proxy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win32.whl", hash 
= "sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70"}, - {file = "lazy_object_proxy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd"}, - {file = "lazy_object_proxy-1.10.0-pp310.pp311.pp312.pp38.pp39-none-any.whl", hash = "sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d"}, + {file = "lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12"}, + {file = "lark-1.3.1.tar.gz", hash = "sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905"}, +] + +[package.extras] +atomic-cache = ["atomicwrites"] +interegular = ["interegular (>=0.3.1,<0.4.0)"] +nearley = ["js2py"] +regex = ["regex"] + +[[package]] +name = "lazy-object-proxy" +version = "1.12.0" +description = "A fast and thorough lazy object proxy." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "lazy_object_proxy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61d5e3310a4aa5792c2b599a7a78ccf8687292c8eb09cf187cca8f09cf6a7519"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1ca33565f698ac1aece152a10f432415d1a2aa9a42dfe23e5ba2bc255ab91f6"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01c7819a410f7c255b20799b65d36b414379a30c6f1684c7bd7eb6777338c1b"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:029d2b355076710505c9545aef5ab3f750d89779310e26ddf2b7b23f6ea03cd8"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc6e3614eca88b1c8a625fc0a47d0d745e7c3255b21dac0e30b3037c5e3deeb8"}, + {file = "lazy_object_proxy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:be5fe974e39ceb0d6c9db0663c0464669cf866b2851c73971409b9566e880eab"}, + {file = 
"lazy_object_proxy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1cf69cd1a6c7fe2dbcc3edaa017cf010f4192e53796538cc7d5e1fedbfa4bcff"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:efff4375a8c52f55a145dc8487a2108c2140f0bec4151ab4e1843e52eb9987ad"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1192e8c2f1031a6ff453ee40213afa01ba765b3dc861302cd91dbdb2e2660b00"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3605b632e82a1cbc32a1e5034278a64db555b3496e0795723ee697006b980508"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a61095f5d9d1a743e1e20ec6d6db6c2ca511961777257ebd9b288951b23b44fa"}, + {file = "lazy_object_proxy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:997b1d6e10ecc6fb6fe0f2c959791ae59599f41da61d652f6c903d1ee58b7370"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ee0d6027b760a11cc18281e702c0309dd92da458a74b4c15025d7fc490deede"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4ab2c584e3cc8be0dfca422e05ad30a9abe3555ce63e9ab7a559f62f8dbc6ff9"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:14e348185adbd03ec17d051e169ec45686dcd840a3779c9d4c10aabe2ca6e1c0"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c4fcbe74fb85df8ba7825fa05eddca764138da752904b378f0ae5ab33a36c308"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:563d2ec8e4d4b68ee7848c5ab4d6057a6d703cb7963b342968bb8758dda33a23"}, + {file = "lazy_object_proxy-1.12.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:53c7fd99eb156bbb82cbc5d5188891d8fdd805ba6c1e3b92b90092da2a837073"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:86fd61cb2ba249b9f436d789d1356deae69ad3231dc3c0f17293ac535162672e"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:81d1852fb30fab81696f93db1b1e55a5d1ff7940838191062f5f56987d5fcc3e"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be9045646d83f6c2664c1330904b245ae2371b5c57a3195e4028aedc9f999655"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:67f07ab742f1adfb3966c40f630baaa7902be4222a17941f3d85fd1dae5565ff"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:75ba769017b944fcacbf6a80c18b2761a1795b03f8899acdad1f1c39db4409be"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:7b22c2bbfb155706b928ac4d74c1a63ac8552a55ba7fff4445155523ea4067e1"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4a79b909aa16bde8ae606f06e6bbc9d3219d2e57fb3e0076e17879072b742c65"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:338ab2f132276203e404951205fe80c3fd59429b3a724e7b662b2eb539bb1be9"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c40b3c9faee2e32bfce0df4ae63f4e73529766893258eca78548bac801c8f66"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:717484c309df78cedf48396e420fa57fc8a2b1f06ea889df7248fdd156e58847"}, + {file = "lazy_object_proxy-1.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a6b7ea5ea1ffe15059eb44bcbcb258f97bcb40e139b88152c40d07b1a1dfc9ac"}, + {file = 
"lazy_object_proxy-1.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:08c465fb5cd23527512f9bd7b4c7ba6cec33e28aad36fbbe46bf7b858f9f3f7f"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c9defba70ab943f1df98a656247966d7729da2fe9c2d5d85346464bf320820a3"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6763941dbf97eea6b90f5b06eb4da9418cc088fce0e3883f5816090f9afcde4a"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fdc70d81235fc586b9e3d1aeef7d1553259b62ecaae9db2167a5d2550dcc391a"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0a83c6f7a6b2bfc11ef3ed67f8cbe99f8ff500b05655d8e7df9aab993a6abc95"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:256262384ebd2a77b023ad02fbcc9326282bcfd16484d5531154b02bc304f4c5"}, + {file = "lazy_object_proxy-1.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7601ec171c7e8584f8ff3f4e440aa2eebf93e854f04639263875b8c2971f819f"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae575ad9b674d0029fc077c5231b3bc6b433a3d1a62a8c363df96974b5534728"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31020c84005d3daa4cc0fa5a310af2066efe6b0d82aeebf9ab199292652ff036"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800f32b00a47c27446a2b767df7538e6c66a3488632c402b4fb2224f9794f3c0"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:15400b18893f345857b9e18b9bd87bd06aba84af6ed086187add70aeaa3f93f1"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:3d3964fbd326578bcdfffd017ef101b6fb0484f34e731fe060ba9b8816498c36"}, + {file = "lazy_object_proxy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:424a8ab6695400845c39f13c685050eab69fa0bbac5790b201cd27375e5e41d7"}, + {file = "lazy_object_proxy-1.12.0-pp39.pp310.pp311.graalpy311-none-any.whl", hash = "sha256:c3b2e0af1f7f77c4263759c4824316ce458fabe0fceadcd24ef8ca08b2d1e402"}, + {file = "lazy_object_proxy-1.12.0.tar.gz", hash = "sha256:1f5a462d92fd0cfb82f1fab28b51bfb209fabbe6aabf7f0d51472c0c124c0c61"}, ] [[package]] name = "mako" -version = "1.3.9" +version = "1.3.10" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"}, - {file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"}, + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, ] [package.dependencies] @@ -2138,9 +2603,9 @@ testing = ["pytest"] name = "markdown" version = "3.3.7" description = "Python implementation of Markdown." -category = "main" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, @@ -2151,97 +2616,128 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "3.0.2" +version = "3.0.3" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false python-versions = ">=3.9" -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +groups = ["main", "dev"] +files = [ + {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, + {file = 
"markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591"}, + {file = "markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6"}, + {file = "markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1"}, + {file = "markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8"}, + {file = "markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad"}, + {file = "markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf"}, + {file = "markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115"}, + {file = "markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a"}, + {file = "markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01"}, + {file = "markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e"}, + {file = "markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d"}, + {file = "markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d"}, + 
{file = "markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f"}, + {file = "markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b"}, + {file = "markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c"}, + {file = "markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795"}, + {file = "markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676"}, + {file = "markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc"}, + {file = "markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12"}, + {file = "markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5"}, + {file = "markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73"}, + {file = "markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025"}, + {file = "markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f"}, + {file = "markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb"}, + {file = 
"markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218"}, + {file = "markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe"}, + {file = "markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97"}, + {file = "markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf"}, + {file = "markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe"}, + {file = "markupsafe-3.0.3-cp314-cp314-win32.whl", hash = 
"sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581"}, + {file = "markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab"}, + {file = "markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50"}, + {file = "markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523"}, + {file = "markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d"}, + {file = "markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9"}, + {file = 
"markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26"}, + {file = "markupsafe-3.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42"}, + {file = "markupsafe-3.0.3-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2"}, + {file = "markupsafe-3.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d"}, + {file = "markupsafe-3.0.3-cp39-cp39-win32.whl", hash = "sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e"}, + {file = "markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8"}, + {file = "markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698"}, ] [[package]] name = "matplotlib-inline" 
-version = "0.1.7" +version = "0.2.1" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, + {file = "matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76"}, + {file = "matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe"}, ] [package.dependencies] traitlets = "*" +[package.extras] +test = ["flake8", "nbdime", "nbval", "notebook", "pytest"] + [[package]] name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -2251,9 +2747,9 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, @@ -2263,9 +2759,9 @@ files = [ name = "minilog" version = "2.3.1" description = "Minimalistic wrapper for Python logging." 
-category = "main" optional = false python-versions = "<4.0,>=3.8" +groups = ["main", "dev"] files = [ {file = "minilog-2.3.1-py3-none-any.whl", hash = "sha256:1a679fefe6140ce1d59c3246adc991f9eb480169e5a6c54d2be9023ee459dc30"}, {file = "minilog-2.3.1.tar.gz", hash = "sha256:4b602572c3bcdd2d8f00d879f635c0de9e632d5d0307e131c91074be8acf444e"}, @@ -2273,14 +2769,14 @@ files = [ [[package]] name = "mistune" -version = "3.1.2" +version = "3.1.4" description = "A sane and fast Markdown parser with useful plugins and renderers" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "mistune-3.1.2-py3-none-any.whl", hash = "sha256:4b47731332315cdca99e0ded46fc0004001c1299ff773dfb48fbe1fd226de319"}, - {file = "mistune-3.1.2.tar.gz", hash = "sha256:733bf018ba007e8b5f2d3a9eb624034f6ee26c4ea769a98ec533ee111d504dff"}, + {file = "mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d"}, + {file = "mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164"}, ] [package.dependencies] @@ -2290,9 +2786,9 @@ typing-extensions = {version = "*", markers = "python_version < \"3.11\""} name = "mkdocs" version = "1.3.1" description = "Project documentation with Markdown." -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mkdocs-1.3.1-py3-none-any.whl", hash = "sha256:fda92466393127d2da830bc6edc3a625a14b436316d1caf347690648e774c4f0"}, {file = "mkdocs-1.3.1.tar.gz", hash = "sha256:a41a2ff25ce3bbacc953f9844ba07d106233cd76c88bac1f59cb1564ac0d87ed"}, @@ -2314,71 +2810,215 @@ watchdog = ">=2.0" i18n = ["babel (>=2.9.0)"] [[package]] -name = "msrest" -version = "0.7.1" -description = "AutoRest swagger generator Python client runtime." 
-category = "main" +name = "multidict" +version = "6.7.0" +description = "multidict implementation" optional = false -python-versions = ">=3.6" -files = [ - {file = "msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32"}, - {file = "msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36"}, + {file = "multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85"}, + {file = "multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7"}, + {file = "multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34"}, + {file = "multidict-6.7.0-cp311-cp311-win32.whl", hash = 
"sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff"}, + {file = "multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81"}, + {file = "multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d"}, + {file = 
"multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8"}, + {file = "multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4"}, + {file = "multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b"}, + {file = "multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8"}, + {file = 
"multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288"}, + {file = "multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17"}, + {file = 
"multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390"}, + {file = "multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6"}, + {file = "multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d"}, + {file = "multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6"}, + {file = "multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f"}, + {file = "multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885"}, + {file = "multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = 
"sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c"}, + {file = "multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e"}, + {file = 
"multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0"}, + {file = "multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13"}, + {file = "multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd"}, + {file = "multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5"}, + {file = 
"multidict-6.7.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4"}, + {file = "multidict-6.7.0-cp39-cp39-win32.whl", hash = "sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91"}, + {file = "multidict-6.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f"}, + {file = "multidict-6.7.0-cp39-cp39-win_arm64.whl", hash = 
"sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546"}, + {file = "multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3"}, + {file = "multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5"}, ] [package.dependencies] -azure-core = ">=1.24.0" -certifi = ">=2017.4.17" -isodate = ">=0.6.0" -requests = ">=2.16,<3.0" -requests-oauthlib = ">=0.5.0" - -[package.extras] -async = ["aiodns", "aiohttp (>=3.0)"] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "mypy" -version = "1.15.0" +version = "1.18.2" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.9" -files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = 
"mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, +groups = ["dev"] +files = [ + {file = "mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c"}, + 
{file = "mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e"}, + {file = "mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b"}, + {file = "mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66"}, + {file = "mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428"}, + {file = "mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed"}, + {file = "mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f"}, + {file = "mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341"}, + {file = "mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d"}, + {file = "mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86"}, + {file = "mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37"}, + {file = "mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8"}, + {file = "mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34"}, + {file = "mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764"}, + {file = "mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893"}, + {file = "mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914"}, + {file = "mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8"}, + {file = "mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074"}, + {file = "mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc"}, + {file = "mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e"}, + {file = "mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986"}, + {file = "mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d"}, + {file = "mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba"}, + {file = "mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544"}, + {file = "mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce"}, + {file = "mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d"}, + {file = 
"mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c"}, + {file = "mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb"}, + {file = "mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075"}, + {file = "mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf"}, + {file = "mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b"}, + {file = "mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133"}, + {file = "mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6"}, + {file = "mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac"}, + {file = "mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b"}, + {file = "mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0"}, + {file = "mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e"}, + {file = "mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing_extensions = ">=4.6.0" @@ 
-2391,23 +3031,23 @@ reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "1.0.0" +version = "1.1.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] [[package]] name = "nbclient" version = "0.10.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." -category = "dev" optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, @@ -2415,7 +3055,7 @@ files = [ [package.dependencies] jupyter-client = ">=6.1.12" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" nbformat = ">=5.1" traitlets = ">=5.4" @@ -2428,9 +3068,9 @@ test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>= name = "nbconvert" version = "7.16.6" description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, @@ -2465,9 +3105,9 @@ webpdf = ["playwright"] name = "nbformat" version = "5.10.4" description = "The Jupyter Notebook format" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, @@ -2476,7 +3116,7 @@ files = [ [package.dependencies] fastjsonschema = ">=2.15" jsonschema = ">=2.6" -jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" traitlets = ">=5.1" [package.extras] @@ -2487,9 +3127,9 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] name = "nest-asyncio" version = "1.6.0" description = "Patch asyncio to allow nested event loops" -category = "dev" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -2497,19 +3137,19 @@ files = [ [[package]] name = "notebook" -version = "7.3.2" +version = "7.4.7" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "notebook-7.3.2-py3-none-any.whl", hash = "sha256:e5f85fc59b69d3618d73cf27544418193ff8e8058d5bf61d315ce4f473556288"}, - {file = "notebook-7.3.2.tar.gz", hash = 
"sha256:705e83a1785f45b383bf3ee13cb76680b92d24f56fb0c7d2136fe1d850cd3ca8"}, + {file = "notebook-7.4.7-py3-none-any.whl", hash = "sha256:362b7c95527f7dd3c4c84d410b782872fd9c734fb2524c11dd92758527b6eda6"}, + {file = "notebook-7.4.7.tar.gz", hash = "sha256:3f0a04027dfcee8a876de48fba13ab77ec8c12f72f848a222ed7f5081b9e342a"}, ] [package.dependencies] jupyter-server = ">=2.4.0,<3" -jupyterlab = ">=4.3.4,<4.4" +jupyterlab = ">=4.4.9,<4.5" jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2,<0.3" tornado = ">=6.2.0" @@ -2517,15 +3157,15 @@ tornado = ">=6.2.0" [package.extras] dev = ["hatch", "pre-commit"] docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] +test = ["importlib-resources (>=5.0) ; python_version < \"3.10\"", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] [[package]] name = "notebook-shim" version = "0.2.4" description = "A shim layer for notebook traits and config" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, @@ -2541,9 +3181,9 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" name = "numpy" version = "1.26.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = 
"numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -2583,30 +3223,14 @@ files = [ {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - [[package]] name = "overrides" version = "7.7.0" description = "A decorator to automatically detect mismatch when overriding a method." 
-category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "python_version < \"3.12\"" files = [ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, @@ -2614,110 +3238,23 @@ files = [ [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] -[[package]] -name = "pandas" -version = "2.2.3" -description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" -optional = false -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = 
"pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = 
"pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = 
"pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, - {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, - {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - {version = ">=1.26.0", markers = 
"python_version >= \"3.12\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow 
(>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - [[package]] name = "pandocfilters" version = "1.5.1" description = "Utilities for writing pandoc filters in python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -2727,9 +3264,9 @@ files = [ name = "paramiko" version = "3.5.1" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61"}, {file = "paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822"}, @@ -2741,20 +3278,20 @@ cryptography = ">=3.3" pynacl = ">=1.5" [package.extras] -all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] -gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +all = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] +gssapi = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] invoke = ["invoke (>=2.0)"] [[package]] name = "parso" -version = "0.8.4" +version = "0.8.5" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = 
"sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, + {file = "parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887"}, + {file = "parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a"}, ] [package.extras] @@ -2765,9 +3302,9 @@ testing = ["docopt", "pytest"] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, @@ -2777,9 +3314,9 @@ files = [ name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -2789,9 +3326,10 @@ files = [ name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." 
-category = "dev" optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -2802,44 +3340,44 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.3.6" +version = "4.5.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, + {file = "platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3"}, + {file = "platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] +docs = ["furo (>=2025.9.25)", "proselint (>=0.14)", "sphinx (>=8.2.3)", "sphinx-autodoc-typehints (>=3.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.4.2)", "pytest-cov (>=7)", "pytest-mock (>=3.15.1)"] +type = ["mypy (>=1.18.2)"] [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = 
">=3.9" +groups = ["dev"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "poethepoet" version = "0.20.0" description = "A task runner that works well with poetry." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "poethepoet-0.20.0-py3-none-any.whl", hash = "sha256:cb37be15f3895ccc65ddf188c2e3d8fb79e26cc9d469a6098cb1c6f994659f6f"}, {file = "poethepoet-0.20.0.tar.gz", hash = "sha256:ca5a2a955f52dfb0a53fad3c989ef0b69ce3d5ec0f6bfa9b1da1f9e32d262e20"}, @@ -2852,16 +3390,77 @@ tomli = ">=1.2.2" [package.extras] poetry-plugin = ["poetry (>=1.0,<2.0)"] +[[package]] +name = "polars" +version = "1.35.2" +description = "Blazingly fast DataFrame library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "polars-1.35.2-py3-none-any.whl", hash = "sha256:5e8057c8289ac148c793478323b726faea933d9776bd6b8a554b0ab7c03db87e"}, + {file = "polars-1.35.2.tar.gz", hash = "sha256:ae458b05ca6e7ca2c089342c70793f92f1103c502dc1b14b56f0a04f2cc1d205"}, +] + +[package.dependencies] +polars-runtime-32 = "1.35.2" + +[package.extras] +adbc = ["adbc-driver-manager[dbapi]", "adbc-driver-sqlite[dbapi]"] +all = ["polars[async,cloudpickle,database,deltalake,excel,fsspec,graph,iceberg,numpy,pandas,plot,pyarrow,pydantic,style,timezone]"] +async = ["gevent"] +calamine = ["fastexcel (>=0.9)"] +cloudpickle = ["cloudpickle"] 
+connectorx = ["connectorx (>=0.3.2)"] +database = ["polars[adbc,connectorx,sqlalchemy]"] +deltalake = ["deltalake (>=1.0.0)"] +excel = ["polars[calamine,openpyxl,xlsx2csv,xlsxwriter]"] +fsspec = ["fsspec"] +gpu = ["cudf-polars-cu12"] +graph = ["matplotlib"] +iceberg = ["pyiceberg (>=0.7.1)"] +numpy = ["numpy (>=1.16.0)"] +openpyxl = ["openpyxl (>=3.0.0)"] +pandas = ["pandas", "polars[pyarrow]"] +plot = ["altair (>=5.4.0)"] +polars-cloud = ["polars_cloud (>=0.0.1a1)"] +pyarrow = ["pyarrow (>=7.0.0)"] +pydantic = ["pydantic"] +rt64 = ["polars-runtime-64 (==1.35.2)"] +rtcompat = ["polars-runtime-compat (==1.35.2)"] +sqlalchemy = ["polars[pandas]", "sqlalchemy"] +style = ["great-tables (>=0.8.0)"] +timezone = ["tzdata ; platform_system == \"Windows\""] +xlsx2csv = ["xlsx2csv (>=0.8.0)"] +xlsxwriter = ["xlsxwriter"] + +[[package]] +name = "polars-runtime-32" +version = "1.35.2" +description = "Blazingly fast DataFrame library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "polars_runtime_32-1.35.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:e465d12a29e8df06ea78947e50bd361cdf77535cd904fd562666a8a9374e7e3a"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:ef2b029b78f64fb53f126654c0bfa654045c7546bd0de3009d08bd52d660e8cc"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dda0994b5dff7f456bb2f4bbd22be9a9e5c5e28670e23fedb13601ec99a46d"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:3b9006902fc51b768ff747c0f74bd4ce04005ee8aeb290ce9c07ce1cbe1b58a9"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-win_amd64.whl", hash = "sha256:ddc015fac39735592e2e7c834c02193ba4d257bb4c8c7478b9ebe440b0756b84"}, + {file = "polars_runtime_32-1.35.2-cp39-abi3-win_arm64.whl", hash = "sha256:6861145aa321a44eda7cc6694fb7751cb7aa0f21026df51b5faa52e64f9dc39b"}, + {file = "polars_runtime_32-1.35.2.tar.gz", hash = 
"sha256:6e6e35733ec52abe54b7d30d245e6586b027d433315d20edfb4a5d162c79fe90"}, +] + [[package]] name = "prometheus-client" -version = "0.21.1" +version = "0.23.1" description = "Python client for the Prometheus monitoring system." -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, - {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, + {file = "prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99"}, + {file = "prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce"}, ] [package.extras] @@ -2869,70 +3468,209 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.50" +version = "3.0.52" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, - {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, + {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, + {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, ] [package.dependencies] wcwidth = "*" +[[package]] +name = "propcache" +version = "0.4.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = 
"propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c"}, + {file = 
"propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb"}, + {file = "propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37"}, + {file = "propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f"}, + {file = "propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1"}, + {file = "propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6"}, + {file = "propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75"}, + {file = "propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8"}, + {file = "propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db"}, + {file = "propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66"}, + {file = "propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81"}, + {file = "propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e"}, + {file = "propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1"}, + {file = "propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717"}, + {file = "propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37"}, + {file = "propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = 
"sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144"}, + {file = "propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f"}, + {file = "propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153"}, + {file = "propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455"}, + {file = "propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85"}, + {file = "propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1"}, + {file = "propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a"}, + {file = 
"propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183"}, + {file = "propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19"}, + {file = "propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f"}, + {file = "propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938"}, + {file = "propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237"}, + {file = "propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d"}, +] + [[package]] name = "psutil" -version = "7.0.0" -description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." -category = "dev" +version = "7.1.3" +description = "Cross-platform lib for process and system monitoring." 
optional = false python-versions = ">=3.6" -files = [ - {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, - {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, - {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, - {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, - {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, - {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, - {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, - {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, +groups = ["dev"] +files = [ + {file = "psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc"}, + {file = "psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0"}, + {file = "psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7"}, + {file = "psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251"}, + {file = "psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa"}, + {file = "psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee"}, + {file = "psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353"}, + {file = "psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b"}, + {file = "psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9"}, + {file = "psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f"}, + {file = "psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7"}, + {file = "psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264"}, + {file = "psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab"}, + {file = "psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880"}, + {file = "psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3"}, + {file = 
"psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b"}, + {file = "psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd"}, + {file = "psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1"}, + {file = "psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74"}, ] [package.extras] -dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] -test = ["pytest", "pytest-xdist", "setuptools"] +dev = ["abi3audit", "black", "check-manifest", "colorama ; os_name == \"nt\"", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pyreadline ; os_name == \"nt\"", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-xdist", "pywin32 ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "validate-pyproject[all]", "virtualenv", "vulture", "wheel", "wheel ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "wmi ; os_name == \"nt\" and platform_python_implementation != \"PyPy\""] +test = ["pytest", "pytest-instafail", "pytest-subtests", "pytest-xdist", "pywin32 ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "setuptools", "wheel ; os_name == \"nt\" and platform_python_implementation != \"PyPy\"", "wmi ; os_name == \"nt\" and platform_python_implementation != \"PyPy\""] [[package]] name = "psycopg2" -version = "2.9.10" +version = "2.9.11" description = "psycopg2 - Python-PostgreSQL Database 
Adapter" -category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, - {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, - {file = "psycopg2-2.9.10-cp311-cp311-win32.whl", hash = "sha256:47c4f9875125344f4c2b870e41b6aad585901318068acd01de93f3677a6522c2"}, - {file = "psycopg2-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:0435034157049f6846e95103bd8f5a668788dd913a7c30162ca9503fdf542cb4"}, - {file = "psycopg2-2.9.10-cp312-cp312-win32.whl", hash = "sha256:65a63d7ab0e067e2cdb3cf266de39663203d38d6a8ed97f5ca0cb315c73fe067"}, - {file = "psycopg2-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:4a579d6243da40a7b3182e0430493dbd55950c493d8c68f4eec0b302f6bbf20e"}, - {file = "psycopg2-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:91fd603a2155da8d0cfcdbf8ab24a2d54bca72795b90d2a3ed2b6da8d979dee2"}, - {file = "psycopg2-2.9.10-cp39-cp39-win32.whl", hash = "sha256:9d5b3b94b79a844a986d029eee38998232451119ad653aea42bb9220a8c5066b"}, - {file = "psycopg2-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:88138c8dedcbfa96408023ea2b0c369eda40fe5d75002c0964c78f46f11fa442"}, - {file = "psycopg2-2.9.10.tar.gz", hash = "sha256:12ec0b40b0273f95296233e8750441339298e6a572f7039da5b260e3c8b60e11"}, + {file = "psycopg2-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:103e857f46bb76908768ead4e2d0ba1d1a130e7b8ed77d3ae91e8b33481813e8"}, + {file = "psycopg2-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:210daed32e18f35e3140a1ebe059ac29209dd96468f2f7559aa59f75ee82a5cb"}, + {file = "psycopg2-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:e03e4a6dbe87ff81540b434f2e5dc2bddad10296db5eea7bdc995bf5f4162938"}, + {file = "psycopg2-2.9.11-cp313-cp313-win_amd64.whl", hash = 
"sha256:8dc379166b5b7d5ea66dcebf433011dfc51a7bb8a5fc12367fa05668e5fc53c8"}, + {file = "psycopg2-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:f10a48acba5fe6e312b891f290b4d2ca595fc9a06850fe53320beac353575578"}, + {file = "psycopg2-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:6ecddcf573777536bddfefaea8079ce959287798c8f5804bee6933635d538924"}, + {file = "psycopg2-2.9.11.tar.gz", hash = "sha256:964d31caf728e217c697ff77ea69c2ba0865fa41ec20bb00f0977e62fdcc52e3"}, ] [[package]] name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] +markers = "os_name != \"nt\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -2942,9 +3680,9 @@ files = [ name = "pure-eval" version = "0.2.3" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -2955,14 +3693,14 @@ tests = ["pytest"] [[package]] name = "pycap" -version = "2.6.0" +version = "2.7.0" description = "PyCap: Python interface to REDCap" -category = "main" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.10" +groups = ["main"] files = [ - {file = "pycap-2.6.0-py3-none-any.whl", hash = "sha256:404a7ba299fa57f0fcadd9f4b6df123e593deda1dcb12b341f39b416b6e83d6b"}, - {file = "pycap-2.6.0.tar.gz", hash = "sha256:68d7403bf573b03ae24cb252fb1e5f73fe365b6c9d54c46199014edaffcc8f94"}, + 
{file = "pycap-2.7.0-py3-none-any.whl", hash = "sha256:f7e1342b842b6c2af55e30afc54a7e49d6fa7ba446b3c614ec7d87f90ff3e58d"}, + {file = "pycap-2.7.0.tar.gz", hash = "sha256:02f7ad47cc3d729b126d34850039fc942683a8061a348abc5105b344a1823f44"}, ] [package.dependencies] @@ -2970,15 +3708,15 @@ requests = ">=2.20,<3.0" semantic-version = ">=2.8.5,<3.0.0" [package.extras] -data-science = ["pandas (>=1.3.4,<2.0.0)"] +data-science = ["pandas (>=2.0.0,<3.0.0)"] [[package]] name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, @@ -2986,23 +3724,23 @@ files = [ [[package]] name = "pycparser" -version = "2.22" +version = "2.23" description = "C parser in Python" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] [[package]] name = "pydocstyle" version = "6.3.0" description = "Python docstring style checker" -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, {file = "pydocstyle-6.3.0.tar.gz", hash = 
"sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, @@ -3012,15 +3750,15 @@ files = [ snowballstemmer = ">=2.2.0" [package.extras] -toml = ["tomli (>=1.2.3)"] +toml = ["tomli (>=1.2.3) ; python_version < \"3.11\""] [[package]] name = "pyfairdatatools" version = "0.1.3" description = "Tools for AI-READI" -category = "main" optional = false python-versions = ">=3.8,<4.0" +groups = ["main"] files = [ {file = "pyfairdatatools-0.1.3-py3-none-any.whl", hash = "sha256:1ee1cc6241dc3387c6299dd4308f0f956967be2d3afa4a5d4f074ea01eb76d76"}, {file = "pyfairdatatools-0.1.3.tar.gz", hash = "sha256:0b80da09f8e02d94cf717254c21fc50cf3404b81372272f27019571db9aeb047"}, @@ -3041,9 +3779,9 @@ validators = ">=0.20.0,<0.21.0" name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" -category = "main" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, @@ -3051,14 +3789,14 @@ files = [ [[package]] name = "pygments" -version = "2.19.1" +version = "2.19.2" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, - {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, + {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, + {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, ] [package.extras] @@ -3068,9 +3806,9 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyjwt" version = "2.10.1" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -3086,9 +3824,9 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pylint" version = "2.17.7" description = "python code static checker" -category = "dev" optional = false python-versions = ">=3.7.2" +groups = ["dev"] files = [ {file = "pylint-2.17.7-py3-none-any.whl", hash = "sha256:27a8d4c7ddc8c2f8c18aa0050148f89ffc09838142193fdbe98f172781a3ff87"}, {file = "pylint-2.17.7.tar.gz", hash = "sha256:f4fcac7ae74cfe36bc8451e931d8438e4a476c20314b1101c458ad0f05191fad"}, @@ -3115,9 +3853,9 @@ testutils = ["gitpython (>3)"] name = "pymdown-extensions" version = "10.4" description = "Extension pack for Python Markdown." 
-category = "main" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pymdown_extensions-10.4-py3-none-any.whl", hash = "sha256:cfc28d6a09d19448bcbf8eee3ce098c7d17ff99f7bd3069db4819af181212037"}, {file = "pymdown_extensions-10.4.tar.gz", hash = "sha256:bc46f11749ecd4d6b71cf62396104b4a200bad3498cb0f5dad1b8502fe461a35"}, @@ -3134,9 +3872,10 @@ extra = ["pygments (>=2.12)"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" and python_version >= \"3.14\"" files = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, @@ -3157,13 +3896,58 @@ cffi = ">=1.4.1" docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] +[[package]] +name = "pynacl" +version = "1.6.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.14\" or platform_python_implementation == \"PyPy\"" +files = [ + {file = "pynacl-1.6.0-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:f46386c24a65383a9081d68e9c2de909b1834ec74ff3013271f1bca9c2d233eb"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:dea103a1afcbc333bc0e992e64233d360d393d1e63d0bc88554f572365664348"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:04f20784083014e265ad58c1b2dd562c3e35864b5394a14ab54f5d150ee9e53e"}, + {file = 
"pynacl-1.6.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bbcc4452a1eb10cd5217318c822fde4be279c9de8567f78bad24c773c21254f8"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51fed9fe1bec9e7ff9af31cd0abba179d0e984a2960c77e8e5292c7e9b7f7b5d"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:10d755cf2a455d8c0f8c767a43d68f24d163b8fe93ccfaabfa7bafd26be58d73"}, + {file = "pynacl-1.6.0-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:536703b8f90e911294831a7fbcd0c062b837f3ccaa923d92a6254e11178aaf42"}, + {file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6b08eab48c9669d515a344fb0ef27e2cbde847721e34bba94a343baa0f33f1f4"}, + {file = "pynacl-1.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5789f016e08e5606803161ba24de01b5a345d24590a80323379fc4408832d290"}, + {file = "pynacl-1.6.0-cp314-cp314t-win32.whl", hash = "sha256:4853c154dc16ea12f8f3ee4b7e763331876316cc3a9f06aeedf39bcdca8f9995"}, + {file = "pynacl-1.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:347dcddce0b4d83ed3f32fd00379c83c425abee5a9d2cd0a2c84871334eaff64"}, + {file = "pynacl-1.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2d6cd56ce4998cb66a6c112fda7b1fdce5266c9f05044fa72972613bef376d15"}, + {file = "pynacl-1.6.0-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:f4b3824920e206b4f52abd7de621ea7a44fd3cb5c8daceb7c3612345dfc54f2e"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:16dd347cdc8ae0b0f6187a2608c0af1c8b7ecbbe6b4a06bff8253c192f696990"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16c60daceee88d04f8d41d0a4004a7ed8d9a5126b997efd2933e08e93a3bd850"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25720bad35dfac34a2bcdd61d9e08d6bfc6041bebc7751d9c9f2446cf1e77d64"}, + {file = 
"pynacl-1.6.0-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8bfaa0a28a1ab718bad6239979a5a57a8d1506d0caf2fba17e524dbb409441cf"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:ef214b90556bb46a485b7da8258e59204c244b1b5b576fb71848819b468c44a7"}, + {file = "pynacl-1.6.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:49c336dd80ea54780bcff6a03ee1a476be1612423010472e60af83452aa0f442"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f3482abf0f9815e7246d461fab597aa179b7524628a4bc36f86a7dc418d2608d"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:140373378e34a1f6977e573033d1dd1de88d2a5d90ec6958c9485b2fd9f3eb90"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6b393bc5e5a0eb86bb85b533deb2d2c815666665f840a09e0aa3362bb6088736"}, + {file = "pynacl-1.6.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4a25cfede801f01e54179b8ff9514bd7b5944da560b7040939732d1804d25419"}, + {file = "pynacl-1.6.0-cp38-abi3-win32.whl", hash = "sha256:dcdeb41c22ff3c66eef5e63049abf7639e0db4edee57ba70531fc1b6b133185d"}, + {file = "pynacl-1.6.0-cp38-abi3-win_amd64.whl", hash = "sha256:cf831615cc16ba324240de79d925eacae8265b7691412ac6b24221db157f6bd1"}, + {file = "pynacl-1.6.0-cp38-abi3-win_arm64.whl", hash = "sha256:84709cea8f888e618c21ed9a0efdb1a59cc63141c403db8bf56c469b71ad56f2"}, + {file = "pynacl-1.6.0.tar.gz", hash = "sha256:cb36deafe6e2bce3b286e5d1f3e1c246e0ccdb8808ddb4550bb2792f2df298f2"}, +] + +[package.dependencies] +cffi = {version = ">=1.4.1", markers = "platform_python_implementation != \"PyPy\" and python_version < \"3.14\""} + +[package.extras] +docs = ["sphinx (<7)", "sphinx_rtd_theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=7.4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] + [[package]] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false 
python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -3184,9 +3968,9 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-cov" version = "3.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, @@ -3201,14 +3985,14 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-describe" -version = "2.2.0" +version = "2.2.1" description = "Describe-style plugin for pytest" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "pytest-describe-2.2.0.tar.gz", hash = "sha256:39bb05eb90f2497d9ca342ef9a0b7fa5bada7e58505aec33f66d661d631955b7"}, - {file = "pytest_describe-2.2.0-py3-none-any.whl", hash = "sha256:bd9e2c73acb4b9522a8400823d98f5b6a081667d3bfd7243a8598336896b544d"}, + {file = "pytest_describe-2.2.1-py3-none-any.whl", hash = "sha256:b7ea0c995599cb6db050b928d5650afdced3629b98cbb71c091fbf9ae5443ed0"}, + {file = "pytest_describe-2.2.1.tar.gz", hash = "sha256:eb59307333508d700082a4a8aadba0f874f02c73602f7113071b0b41bfe183b7"}, ] [package.dependencies] @@ -3218,9 +4002,9 @@ pytest = ">=4.6,<9" name = "pytest-expecter" version = "3.0" description = "Better testing with expecter and pytest." 
-category = "dev" optional = false python-versions = ">=3.8,<4.0" +groups = ["dev"] files = [ {file = "pytest-expecter-3.0.tar.gz", hash = "sha256:be8f3e9f823af6d6713e3f552ed47560061a2fd243a78952180f5df61a2b76a4"}, {file = "pytest_expecter-3.0-py3-none-any.whl", hash = "sha256:98fe65ecc1ddb7ca29084dc68ec07983dbbdb20b566fd14140b0b5f4b7c84cc8"}, @@ -3230,9 +4014,9 @@ files = [ name = "pytest-random" version = "0.02" description = "py.test plugin to randomize tests" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "pytest-random-0.02.tar.gz", hash = "sha256:92f25db8c5d9ffc20d90b51997b914372d6955cb9cf1f6ead45b90514fc0eddd"}, ] @@ -3244,9 +4028,9 @@ pytest = ">=2.2.3" name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -3257,14 +4041,14 @@ six = ">=1.5" [[package]] name = "python-dotenv" -version = "1.0.1" +version = "1.2.1" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, + {file = "python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"}, + {file = "python_dotenv-1.2.1.tar.gz", hash = 
"sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"}, ] [package.extras] @@ -3272,149 +4056,132 @@ cli = ["click (>=5.0)"] [[package]] name = "python-json-logger" -version = "3.2.1" +version = "4.0.0" description = "JSON Log Formatter for the Python Logging Package" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "python_json_logger-3.2.1-py3-none-any.whl", hash = "sha256:cdc17047eb5374bd311e748b42f99d71223f3b0e186f4206cc5d52aefe85b090"}, - {file = "python_json_logger-3.2.1.tar.gz", hash = "sha256:8eb0554ea17cb75b05d2848bc14fb02fbdbd9d6972120781b974380bfa162008"}, + {file = "python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2"}, + {file = "python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f"}, ] [package.extras] -dev = ["backports.zoneinfo", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec", "msgspec-python313-pre", "mypy", "orjson", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] - -[[package]] -name = "pytz" -version = "2025.1" -description = "World timezone definitions, modern and historical" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, - {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, -] - -[[package]] -name = "pywin32" -version = "308" -description = "Python for Window Extensions" -category = "dev" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, 
- {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, - {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, - {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, - {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, - {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, - {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, - {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, - {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, - {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, - {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, - {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, - {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, - {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, - {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, - {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, - {file = 
"pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, - {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, -] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] [[package]] name = "pywinpty" -version = "2.0.15" +version = "3.0.2" description = "Pseudo terminal support for Windows from Python." -category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "os_name == \"nt\"" files = [ - {file = "pywinpty-2.0.15-cp310-cp310-win_amd64.whl", hash = "sha256:8e7f5de756a615a38b96cd86fa3cd65f901ce54ce147a3179c45907fa11b4c4e"}, - {file = "pywinpty-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:9a6bcec2df2707aaa9d08b86071970ee32c5026e10bcc3cc5f6f391d85baf7ca"}, - {file = "pywinpty-2.0.15-cp312-cp312-win_amd64.whl", hash = "sha256:83a8f20b430bbc5d8957249f875341a60219a4e971580f2ba694fbfb54a45ebc"}, - {file = "pywinpty-2.0.15-cp313-cp313-win_amd64.whl", hash = "sha256:ab5920877dd632c124b4ed17bc6dd6ef3b9f86cd492b963ffdb1a67b85b0f408"}, - {file = "pywinpty-2.0.15-cp313-cp313t-win_amd64.whl", hash = "sha256:a4560ad8c01e537708d2790dbe7da7d986791de805d89dd0d3697ca59e9e4901"}, - {file = "pywinpty-2.0.15-cp39-cp39-win_amd64.whl", hash = "sha256:d261cd88fcd358cfb48a7ca0700db3e1c088c9c10403c9ebc0d8a8b57aa6a117"}, - {file = "pywinpty-2.0.15.tar.gz", hash = "sha256:312cf39153a8736c617d45ce8b6ad6cd2107de121df91c455b10ce6bba7a39b2"}, + {file = "pywinpty-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:65db57fd3387d71e8372b6a54269cbcd0f6dfa6d4616a29e0af749ec19f5c558"}, 
+ {file = "pywinpty-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:327790d70e4c841ebd9d0f295a780177149aeb405bca44c7115a3de5c2054b23"}, + {file = "pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e"}, + {file = "pywinpty-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:18f78b81e4cfee6aabe7ea8688441d30247b73e52cd9657138015c5f4ee13a51"}, + {file = "pywinpty-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:663383ecfab7fc382cc97ea5c4f7f0bb32c2f889259855df6ea34e5df42d305b"}, + {file = "pywinpty-3.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:28297cecc37bee9f24d8889e47231972d6e9e84f7b668909de54f36ca785029a"}, + {file = "pywinpty-3.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:34b55ae9a1b671fe3eae071d86618110538e8eaad18fcb1531c0830b91a82767"}, + {file = "pywinpty-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:3962daf801bc38dd4de872108c424b5338c9a46c6efca5761854cd66370a9022"}, + {file = "pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004"}, ] [[package]] name = "pyyaml" -version = "6.0.2" +version = "6.0.3" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = 
"PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = 
"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +groups = ["main", "dev"] +files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = 
"PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, + {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198"}, + {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0"}, + {file = "pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69"}, + {file = "pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e"}, + {file = "pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c"}, + {file = "pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e"}, + {file = 
"pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00"}, + {file = "pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a"}, + {file = "pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4"}, + {file = "pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b"}, + {file = "pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196"}, + {file = "pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28"}, + {file = "pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c"}, + {file = 
"pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e"}, + {file = "pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea"}, + {file = "pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b"}, + {file = "pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8"}, + {file = "pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5"}, + {file = "pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6"}, + {file = "pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be"}, + {file = "pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c"}, + {file = "pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac"}, + {file = "pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788"}, + {file = "pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764"}, + {file = "pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac"}, + {file = "pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3"}, + {file = "pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702"}, + {file = "pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065"}, + {file = "pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9"}, + {file = "pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da"}, + {file = "pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5"}, + {file = "pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926"}, + {file = "pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7"}, + {file = "pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0"}, + {file = "pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007"}, + {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] [[package]] name = "pyyaml-env-tag" -version = "0.1" -description = "A custom YAML tag for referencing environment variables in YAML files. " -category = "dev" +version = "1.1" +description = "A custom YAML tag for referencing environment variables in YAML files." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, - {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, + {file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"}, + {file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"}, ] [package.dependencies] @@ -3422,121 +4189,104 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "26.2.1" +version = "27.1.0" description = "Python bindings for 0MQ" -category = "dev" optional = false -python-versions = ">=3.7" -files = [ - {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:f39d1227e8256d19899d953e6e19ed2ccb689102e6d85e024da5acf410f301eb"}, - {file = "pyzmq-26.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a23948554c692df95daed595fdd3b76b420a4939d7a8a28d6d7dea9711878641"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95f5728b367a042df146cec4340d75359ec6237beebf4a8f5cf74657c65b9257"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95f7b01b3f275504011cf4cf21c6b885c8d627ce0867a7e83af1382ebab7b3ff"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a00370a2ef2159c310e662c7c0f2d030f437f35f478bb8b2f70abd07e26b24"}, - {file = "pyzmq-26.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8531ed35dfd1dd2af95f5d02afd6545e8650eedbf8c3d244a554cf47d8924459"}, - {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cdb69710e462a38e6039cf17259d328f86383a06c20482cc154327968712273c"}, - {file = 
"pyzmq-26.2.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e7eeaef81530d0b74ad0d29eec9997f1c9230c2f27242b8d17e0ee67662c8f6e"}, - {file = "pyzmq-26.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:361edfa350e3be1f987e592e834594422338d7174364763b7d3de5b0995b16f3"}, - {file = "pyzmq-26.2.1-cp310-cp310-win32.whl", hash = "sha256:637536c07d2fb6a354988b2dd1d00d02eb5dd443f4bbee021ba30881af1c28aa"}, - {file = "pyzmq-26.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:45fad32448fd214fbe60030aa92f97e64a7140b624290834cc9b27b3a11f9473"}, - {file = "pyzmq-26.2.1-cp310-cp310-win_arm64.whl", hash = "sha256:d9da0289d8201c8a29fd158aaa0dfe2f2e14a181fd45e2dc1fbf969a62c1d594"}, - {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:c059883840e634a21c5b31d9b9a0e2b48f991b94d60a811092bc37992715146a"}, - {file = "pyzmq-26.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed038a921df836d2f538e509a59cb638df3e70ca0fcd70d0bf389dfcdf784d2a"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9027a7fcf690f1a3635dc9e55e38a0d6602dbbc0548935d08d46d2e7ec91f454"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d75fcb00a1537f8b0c0bb05322bc7e35966148ffc3e0362f0369e44a4a1de99"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0019cc804ac667fb8c8eaecdb66e6d4a68acf2e155d5c7d6381a5645bd93ae4"}, - {file = "pyzmq-26.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f19dae58b616ac56b96f2e2290f2d18730a898a171f447f491cc059b073ca1fa"}, - {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f5eeeb82feec1fc5cbafa5ee9022e87ffdb3a8c48afa035b356fcd20fc7f533f"}, - {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:000760e374d6f9d1a3478a42ed0c98604de68c9e94507e5452951e598ebecfba"}, - {file = "pyzmq-26.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:817fcd3344d2a0b28622722b98500ae9c8bfee0f825b8450932ff19c0b15bebd"}, - {file = "pyzmq-26.2.1-cp311-cp311-win32.whl", hash = "sha256:88812b3b257f80444a986b3596e5ea5c4d4ed4276d2b85c153a6fbc5ca457ae7"}, - {file = "pyzmq-26.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:ef29630fde6022471d287c15c0a2484aba188adbfb978702624ba7a54ddfa6c1"}, - {file = "pyzmq-26.2.1-cp311-cp311-win_arm64.whl", hash = "sha256:f32718ee37c07932cc336096dc7403525301fd626349b6eff8470fe0f996d8d7"}, - {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:a6549ecb0041dafa55b5932dcbb6c68293e0bd5980b5b99f5ebb05f9a3b8a8f3"}, - {file = "pyzmq-26.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0250c94561f388db51fd0213cdccbd0b9ef50fd3c57ce1ac937bf3034d92d72e"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ee4297d9e4b34b5dc1dd7ab5d5ea2cbba8511517ef44104d2915a917a56dc8"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2a9cb17fd83b7a3a3009901aca828feaf20aa2451a8a487b035455a86549c09"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786dd8a81b969c2081b31b17b326d3a499ddd1856e06d6d79ad41011a25148da"}, - {file = "pyzmq-26.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2d88ba221a07fc2c5581565f1d0fe8038c15711ae79b80d9462e080a1ac30435"}, - {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c84c1297ff9f1cd2440da4d57237cb74be21fdfe7d01a10810acba04e79371a"}, - {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46d4ebafc27081a7f73a0f151d0c38d4291656aa134344ec1f3d0199ebfbb6d4"}, - {file = "pyzmq-26.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:91e2bfb8e9a29f709d51b208dd5f441dc98eb412c8fe75c24ea464734ccdb48e"}, - {file = "pyzmq-26.2.1-cp312-cp312-win32.whl", hash = "sha256:4a98898fdce380c51cc3e38ebc9aa33ae1e078193f4dc641c047f88b8c690c9a"}, - {file = 
"pyzmq-26.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:a0741edbd0adfe5f30bba6c5223b78c131b5aa4a00a223d631e5ef36e26e6d13"}, - {file = "pyzmq-26.2.1-cp312-cp312-win_arm64.whl", hash = "sha256:e5e33b1491555843ba98d5209439500556ef55b6ab635f3a01148545498355e5"}, - {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:099b56ef464bc355b14381f13355542e452619abb4c1e57a534b15a106bf8e23"}, - {file = "pyzmq-26.2.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:651726f37fcbce9f8dd2a6dab0f024807929780621890a4dc0c75432636871be"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57dd4d91b38fa4348e237a9388b4423b24ce9c1695bbd4ba5a3eada491e09399"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d51a7bfe01a48e1064131f3416a5439872c533d756396be2b39e3977b41430f9"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7154d228502e18f30f150b7ce94f0789d6b689f75261b623f0fdc1eec642aab"}, - {file = "pyzmq-26.2.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:f1f31661a80cc46aba381bed475a9135b213ba23ca7ff6797251af31510920ce"}, - {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:290c96f479504439b6129a94cefd67a174b68ace8a8e3f551b2239a64cfa131a"}, - {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f2c307fbe86e18ab3c885b7e01de942145f539165c3360e2af0f094dd440acd9"}, - {file = "pyzmq-26.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:b314268e716487bfb86fcd6f84ebbe3e5bec5fac75fdf42bc7d90fdb33f618ad"}, - {file = "pyzmq-26.2.1-cp313-cp313-win32.whl", hash = "sha256:edb550616f567cd5603b53bb52a5f842c0171b78852e6fc7e392b02c2a1504bb"}, - {file = "pyzmq-26.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:100a826a029c8ef3d77a1d4c97cbd6e867057b5806a7276f2bac1179f893d3bf"}, - {file = "pyzmq-26.2.1-cp313-cp313-win_arm64.whl", hash = 
"sha256:6991ee6c43e0480deb1b45d0c7c2bac124a6540cba7db4c36345e8e092da47ce"}, - {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:25e720dba5b3a3bb2ad0ad5d33440babd1b03438a7a5220511d0c8fa677e102e"}, - {file = "pyzmq-26.2.1-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:9ec6abfb701437142ce9544bd6a236addaf803a32628d2260eb3dbd9a60e2891"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e1eb9d2bfdf5b4e21165b553a81b2c3bd5be06eeddcc4e08e9692156d21f1f6"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90dc731d8e3e91bcd456aa7407d2eba7ac6f7860e89f3766baabb521f2c1de4a"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6a93d684278ad865fc0b9e89fe33f6ea72d36da0e842143891278ff7fd89c3"}, - {file = "pyzmq-26.2.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:c1bb37849e2294d519117dd99b613c5177934e5c04a5bb05dd573fa42026567e"}, - {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:632a09c6d8af17b678d84df442e9c3ad8e4949c109e48a72f805b22506c4afa7"}, - {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:fc409c18884eaf9ddde516d53af4f2db64a8bc7d81b1a0c274b8aa4e929958e8"}, - {file = "pyzmq-26.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:17f88622b848805d3f6427ce1ad5a2aa3cf61f12a97e684dab2979802024d460"}, - {file = "pyzmq-26.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ef584f13820d2629326fe20cc04069c21c5557d84c26e277cfa6235e523b10f"}, - {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:160194d1034902937359c26ccfa4e276abffc94937e73add99d9471e9f555dd6"}, - {file = "pyzmq-26.2.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:574b285150afdbf0a0424dddf7ef9a0d183988eb8d22feacb7160f7515e032cb"}, - {file = 
"pyzmq-26.2.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44dba28c34ce527cf687156c81f82bf1e51f047838d5964f6840fd87dfecf9fe"}, - {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9fbdb90b85c7624c304f72ec7854659a3bd901e1c0ffb2363163779181edeb68"}, - {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a7ad34a2921e8f76716dc7205c9bf46a53817e22b9eec2e8a3e08ee4f4a72468"}, - {file = "pyzmq-26.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:866c12b7c90dd3a86983df7855c6f12f9407c8684db6aa3890fc8027462bda82"}, - {file = "pyzmq-26.2.1-cp37-cp37m-win32.whl", hash = "sha256:eeb37f65350d5c5870517f02f8bbb2ac0fbec7b416c0f4875219fef305a89a45"}, - {file = "pyzmq-26.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4eb3197f694dfb0ee6af29ef14a35f30ae94ff67c02076eef8125e2d98963cd0"}, - {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:36d4e7307db7c847fe37413f333027d31c11d5e6b3bacbb5022661ac635942ba"}, - {file = "pyzmq-26.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1c6ae0e95d0a4b0cfe30f648a18e764352d5415279bdf34424decb33e79935b8"}, - {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5b4fc44f5360784cc02392f14235049665caaf7c0fe0b04d313e763d3338e463"}, - {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:51431f6b2750eb9b9d2b2952d3cc9b15d0215e1b8f37b7a3239744d9b487325d"}, - {file = "pyzmq-26.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbc78ae2065042de48a65f1421b8af6b76a0386bb487b41955818c3c1ce7bed"}, - {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d14f50d61a89b0925e4d97a0beba6053eb98c426c5815d949a43544f05a0c7ec"}, - {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:004837cb958988c75d8042f5dac19a881f3d9b3b75b2f574055e22573745f841"}, - {file = "pyzmq-26.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:0b2007f28ce1b8acebdf4812c1aab997a22e57d6a73b5f318b708ef9bcabbe95"}, - {file = "pyzmq-26.2.1-cp38-cp38-win32.whl", hash = "sha256:269c14904da971cb5f013100d1aaedb27c0a246728c341d5d61ddd03f463f2f3"}, - {file = "pyzmq-26.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:31fff709fef3b991cfe7189d2cfe0c413a1d0e82800a182cfa0c2e3668cd450f"}, - {file = "pyzmq-26.2.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a4bffcadfd40660f26d1b3315a6029fd4f8f5bf31a74160b151f5c577b2dc81b"}, - {file = "pyzmq-26.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e76ad4729c2f1cf74b6eb1bdd05f6aba6175999340bd51e6caee49a435a13bf5"}, - {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8b0f5bab40a16e708e78a0c6ee2425d27e1a5d8135c7a203b4e977cee37eb4aa"}, - {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8e47050412f0ad3a9b2287779758073cbf10e460d9f345002d4779e43bb0136"}, - {file = "pyzmq-26.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f18ce33f422d119b13c1363ed4cce245b342b2c5cbbb76753eabf6aa6f69c7d"}, - {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ceb0d78b7ef106708a7e2c2914afe68efffc0051dc6a731b0dbacd8b4aee6d68"}, - {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ebdd96bd637fd426d60e86a29ec14b8c1ab64b8d972f6a020baf08a30d1cf46"}, - {file = "pyzmq-26.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03719e424150c6395b9513f53a5faadcc1ce4b92abdf68987f55900462ac7eec"}, - {file = "pyzmq-26.2.1-cp39-cp39-win32.whl", hash = "sha256:ef5479fac31df4b304e96400fc67ff08231873ee3537544aa08c30f9d22fce38"}, - {file = "pyzmq-26.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:f92a002462154c176dac63a8f1f6582ab56eb394ef4914d65a9417f5d9fde218"}, - {file = "pyzmq-26.2.1-cp39-cp39-win_arm64.whl", hash = "sha256:1fd4b3efc6f62199886440d5e27dd3ccbcb98dfddf330e7396f1ff421bfbb3c2"}, - {file = 
"pyzmq-26.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:380816d298aed32b1a97b4973a4865ef3be402a2e760204509b52b6de79d755d"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97cbb368fd0debdbeb6ba5966aa28e9a1ae3396c7386d15569a6ca4be4572b99"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf7b5942c6b0dafcc2823ddd9154f419147e24f8df5b41ca8ea40a6db90615c"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fe6e28a8856aea808715f7a4fc11f682b9d29cac5d6262dd8fe4f98edc12d53"}, - {file = "pyzmq-26.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bd8fdee945b877aa3bffc6a5a8816deb048dab0544f9df3731ecd0e54d8c84c9"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ee7152f32c88e0e1b5b17beb9f0e2b14454235795ef68c0c120b6d3d23d12833"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:baa1da72aecf6a490b51fba7a51f1ce298a1e0e86d0daef8265c8f8f9848eb77"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:49135bb327fca159262d8fd14aa1f4a919fe071b04ed08db4c7c37d2f0647162"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bacc1a10c150d58e8a9ee2b2037a70f8d903107e0f0b6e079bf494f2d09c091"}, - {file = "pyzmq-26.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:09dac387ce62d69bec3f06d51610ca1d660e7849eb45f68e38e7f5cf1f49cbcb"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:70b3a46ecd9296e725ccafc17d732bfc3cdab850b54bd913f843a0a54dfb2c04"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:59660e15c797a3b7a571c39f8e0b62a1f385f98ae277dfe95ca7eaf05b5a0f12"}, - {file = 
"pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0f50db737d688e96ad2a083ad2b453e22865e7e19c7f17d17df416e91ddf67eb"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a003200b6cd64e89b5725ff7e284a93ab24fd54bbac8b4fa46b1ed57be693c27"}, - {file = "pyzmq-26.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f9ba5def063243793dec6603ad1392f735255cbc7202a3a484c14f99ec290705"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1238c2448c58b9c8d6565579393148414a42488a5f916b3f322742e561f6ae0d"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eddb3784aed95d07065bcf94d07e8c04024fdb6b2386f08c197dfe6b3528fda"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0f19c2097fffb1d5b07893d75c9ee693e9cbc809235cf3f2267f0ef6b015f24"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0995fd3530f2e89d6b69a2202e340bbada3191014352af978fa795cb7a446331"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7c6160fe513654e65665332740f63de29ce0d165e053c0c14a161fa60dd0da01"}, - {file = "pyzmq-26.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8ec8e3aea6146b761d6c57fcf8f81fcb19f187afecc19bf1701a48db9617a217"}, - {file = "pyzmq-26.2.1.tar.gz", hash = "sha256:17d72a74e5e9ff3829deb72897a175333d3ef5b5413948cae3cf7ebf0b02ecca"}, +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pyzmq-27.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:508e23ec9bc44c0005c4946ea013d9317ae00ac67778bd47519fdf5a0e930ff4"}, + {file = "pyzmq-27.1.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:507b6f430bdcf0ee48c0d30e734ea89ce5567fd7b8a0f0044a369c176aa44556"}, + {file = 
"pyzmq-27.1.0-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf7b38f9fd7b81cb6d9391b2946382c8237fd814075c6aa9c3b746d53076023b"}, + {file = "pyzmq-27.1.0-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03ff0b279b40d687691a6217c12242ee71f0fba28bf8626ff50e3ef0f4410e1e"}, + {file = "pyzmq-27.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:677e744fee605753eac48198b15a2124016c009a11056f93807000ab11ce6526"}, + {file = "pyzmq-27.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd2fec2b13137416a1c5648b7009499bcc8fea78154cd888855fa32514f3dad1"}, + {file = "pyzmq-27.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:08e90bb4b57603b84eab1d0ca05b3bbb10f60c1839dc471fc1c9e1507bef3386"}, + {file = "pyzmq-27.1.0-cp310-cp310-win32.whl", hash = "sha256:a5b42d7a0658b515319148875fcb782bbf118dd41c671b62dae33666c2213bda"}, + {file = "pyzmq-27.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0bb87227430ee3aefcc0ade2088100e528d5d3298a0a715a64f3d04c60ba02f"}, + {file = "pyzmq-27.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:9a916f76c2ab8d045b19f2286851a38e9ac94ea91faf65bd64735924522a8b32"}, + {file = "pyzmq-27.1.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:226b091818d461a3bef763805e75685e478ac17e9008f49fce2d3e52b3d58b86"}, + {file = "pyzmq-27.1.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0790a0161c281ca9723f804871b4027f2e8b5a528d357c8952d08cd1a9c15581"}, + {file = "pyzmq-27.1.0-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c895a6f35476b0c3a54e3eb6ccf41bf3018de937016e6e18748317f25d4e925f"}, + {file = "pyzmq-27.1.0-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bbf8d3630bf96550b3be8e1fc0fea5cbdc8d5466c1192887bd94869da17a63e"}, + {file = "pyzmq-27.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15c8bd0fe0dabf808e2d7a681398c4e5ded70a551ab47482067a572c054c8e2e"}, + {file = 
"pyzmq-27.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bafcb3dd171b4ae9f19ee6380dfc71ce0390fefaf26b504c0e5f628d7c8c54f2"}, + {file = "pyzmq-27.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e829529fcaa09937189178115c49c504e69289abd39967cd8a4c215761373394"}, + {file = "pyzmq-27.1.0-cp311-cp311-win32.whl", hash = "sha256:6df079c47d5902af6db298ec92151db82ecb557af663098b92f2508c398bb54f"}, + {file = "pyzmq-27.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:190cbf120fbc0fc4957b56866830def56628934a9d112aec0e2507aa6a032b97"}, + {file = "pyzmq-27.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:eca6b47df11a132d1745eb3b5b5e557a7dae2c303277aa0e69c6ba91b8736e07"}, + {file = "pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc"}, + {file = "pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113"}, + {file = "pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233"}, + {file = "pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31"}, + {file = "pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28"}, + {file = "pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856"}, + {file = "pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496"}, + {file = "pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd"}, + {file = "pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = 
"sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf"}, + {file = "pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f"}, + {file = "pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5"}, + {file = "pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6"}, + {file = "pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7"}, + {file = "pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05"}, + {file = "pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9"}, + {file = "pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128"}, + {file = "pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39"}, + {file = "pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97"}, + {file = "pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db"}, + {file = "pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c"}, + {file = "pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2"}, + {file = "pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e"}, 
+ {file = "pyzmq-27.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ce980af330231615756acd5154f29813d553ea555485ae712c491cd483df6b7a"}, + {file = "pyzmq-27.1.0-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1779be8c549e54a1c38f805e56d2a2e5c009d26de10921d7d51cfd1c8d4632ea"}, + {file = "pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7200bb0f03345515df50d99d3db206a0a6bee1955fbb8c453c76f5bf0e08fb96"}, + {file = "pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01c0e07d558b06a60773744ea6251f769cd79a41a97d11b8bf4ab8f034b0424d"}, + {file = "pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:80d834abee71f65253c91540445d37c4c561e293ba6e741b992f20a105d69146"}, + {file = "pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:544b4e3b7198dde4a62b8ff6685e9802a9a1ebf47e77478a5eb88eca2a82f2fd"}, + {file = "pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cedc4c68178e59a4046f97eca31b148ddcf51e88677de1ef4e78cf06c5376c9a"}, + {file = "pyzmq-27.1.0-cp314-cp314t-win32.whl", hash = "sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92"}, + {file = "pyzmq-27.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0"}, + {file = "pyzmq-27.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7"}, + {file = "pyzmq-27.1.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:18339186c0ed0ce5835f2656cdfb32203125917711af64da64dbaa3d949e5a1b"}, + {file = "pyzmq-27.1.0-cp38-cp38-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:753d56fba8f70962cd8295fb3edb40b9b16deaa882dd2b5a3a2039f9ff7625aa"}, + {file = "pyzmq-27.1.0-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b721c05d932e5ad9ff9344f708c96b9e1a485418c6618d765fca95d4daacfbef"}, + {file = 
"pyzmq-27.1.0-cp38-cp38-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be883ff3d722e6085ee3f4afc057a50f7f2e0c72d289fd54df5706b4e3d3a50"}, + {file = "pyzmq-27.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b2e592db3a93128daf567de9650a2f3859017b3f7a66bc4ed6e4779d6034976f"}, + {file = "pyzmq-27.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad68808a61cbfbbae7ba26d6233f2a4aa3b221de379ce9ee468aa7a83b9c36b0"}, + {file = "pyzmq-27.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e2687c2d230e8d8584fbea433c24382edfeda0c60627aca3446aa5e58d5d1831"}, + {file = "pyzmq-27.1.0-cp38-cp38-win32.whl", hash = "sha256:a1aa0ee920fb3825d6c825ae3f6c508403b905b698b6460408ebd5bb04bbb312"}, + {file = "pyzmq-27.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:df7cd397ece96cf20a76fae705d40efbab217d217897a5053267cd88a700c266"}, + {file = "pyzmq-27.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:96c71c32fff75957db6ae33cd961439f386505c6e6b377370af9b24a1ef9eafb"}, + {file = "pyzmq-27.1.0-cp39-cp39-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:49d3980544447f6bd2968b6ac913ab963a49dcaa2d4a2990041f16057b04c429"}, + {file = "pyzmq-27.1.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:849ca054d81aa1c175c49484afaaa5db0622092b5eccb2055f9f3bb8f703782d"}, + {file = "pyzmq-27.1.0-cp39-cp39-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3970778e74cb7f85934d2b926b9900e92bfe597e62267d7499acc39c9c28e345"}, + {file = "pyzmq-27.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:da96ecdcf7d3919c3be2de91a8c513c186f6762aa6cf7c01087ed74fad7f0968"}, + {file = "pyzmq-27.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9541c444cfe1b1c0156c5c86ece2bb926c7079a18e7b47b0b1b3b1b875e5d098"}, + {file = "pyzmq-27.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e30a74a39b93e2e1591b58eb1acef4902be27c957a8720b0e368f579b82dc22f"}, + {file = "pyzmq-27.1.0-cp39-cp39-win32.whl", hash = 
"sha256:b1267823d72d1e40701dcba7edc45fd17f71be1285557b7fe668887150a14b78"}, + {file = "pyzmq-27.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c996ded912812a2fcd7ab6574f4ad3edc27cb6510349431e4930d4196ade7db"}, + {file = "pyzmq-27.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:346e9ba4198177a07e7706050f35d733e08c1c1f8ceacd5eb6389d653579ffbc"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c17e03cbc9312bee223864f1a2b13a99522e0dc9f7c5df0177cd45210ac286e6"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f328d01128373cb6763823b2b4e7f73bdf767834268c565151eacb3b7a392f90"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c1790386614232e1b3a40a958454bdd42c6d1811837b15ddbb052a032a43f62"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:448f9cb54eb0cee4732b46584f2710c8bc178b0e5371d9e4fc8125201e413a74"}, + {file = "pyzmq-27.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:05b12f2d32112bf8c95ef2e74ec4f1d4beb01f8b5e703b38537f8849f92cb9ba"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:18770c8d3563715387139060d37859c02ce40718d1faf299abddcdcc6a649066"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ac25465d42f92e990f8d8b0546b01c391ad431c3bf447683fdc40565941d0604"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53b40f8ae006f2734ee7608d59ed661419f087521edbfc2149c3932e9c14808c"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f605d884e7c8be8fe1aa94e0a783bf3f591b84c24e4bc4f3e7564c82ac25e271"}, + {file = "pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355"}, + {file = 
"pyzmq-27.1.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:50081a4e98472ba9f5a02850014b4c9b629da6710f8f14f3b15897c666a28f1b"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:510869f9df36ab97f89f4cff9d002a89ac554c7ac9cadd87d444aa4cf66abd27"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1f8426a01b1c4098a750973c37131cf585f61c7911d735f729935a0c701b68d3"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726b6a502f2e34c6d2ada5e702929586d3ac948a4dbbb7fed9854ec8c0466027"}, + {file = "pyzmq-27.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:bd67e7c8f4654bef471c0b1ca6614af0b5202a790723a58b79d9584dc8022a78"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:722ea791aa233ac0a819fc2c475e1292c76930b31f1d828cb61073e2fe5e208f"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:01f9437501886d3a1dd4b02ef59fb8cc384fa718ce066d52f175ee49dd5b7ed8"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4a19387a3dddcc762bfd2f570d14e2395b2c9701329b266f83dd87a2b3cbd381"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c618fbcd069e3a29dcd221739cacde52edcc681f041907867e0f5cc7e85f172"}, + {file = "pyzmq-27.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff8d114d14ac671d88c89b9224c63d6c4e5a613fe8acd5594ce53d752a3aafe9"}, + {file = "pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540"}, ] [package.dependencies] @@ -3544,18 +4294,19 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "redis" -version = "5.2.1" +version = "5.3.1" description = "Python client for Redis database and key-value store" -category = "main" 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, - {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, + {file = "redis-5.3.1-py3-none-any.whl", hash = "sha256:dc1909bd24669cc31b5f67a039700b16ec30571096c5f1f0d9d2324bff31af97"}, + {file = "redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c"}, ] [package.dependencies] async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} +PyJWT = ">=2.9.0" [package.extras] hiredis = ["hiredis (>=3.0.0)"] @@ -3563,14 +4314,14 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)" [[package]] name = "referencing" -version = "0.36.2" +version = "0.37.0" description = "JSON Referencing + Python" -category = "main" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" +groups = ["main", "dev"] files = [ - {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, - {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, + {file = "referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231"}, + {file = "referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8"}, ] [package.dependencies] @@ -3580,19 +4331,19 @@ typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" -version = "2.32.3" +version = "2.32.5" description = "Python HTTP for Humans." 
-category = "main" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -3600,32 +4351,13 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." 
-category = "main" -optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] - [[package]] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, @@ -3638,188 +4370,218 @@ six = "*" name = "rfc3986-validator" version = "0.1.1" description = "Pure python rfc3986 validator" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, ] [[package]] -name = "rpds-py" -version = "0.22.3" -description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" +name = "rfc3987-syntax" +version = "1.1.0" +description = "Helper functions to syntactically validate strings according to RFC 3987." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, - {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, - {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, - {file = 
"rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, - {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = 
"sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, - {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, - {file = 
"rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, - {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, - {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, - {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, - {file = 
"rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, - {file 
= "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, - {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, - {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, - {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, + {file = "rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f"}, + {file = "rfc3987_syntax-1.1.0.tar.gz", hash = "sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d"}, +] + +[package.dependencies] +lark = ">=1.2.2" + +[package.extras] +testing = ["pytest (>=8.3.5)"] + +[[package]] +name = "rpds-py" +version = "0.28.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.10" +groups = ["main", "dev"] +files = [ + {file = "rpds_py-0.28.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7b6013db815417eeb56b2d9d7324e64fcd4fa289caeee6e7a78b2e11fc9b438a"}, + {file = "rpds_py-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a4c6b05c685c0c03f80dabaeb73e74218c49deea965ca63f76a752807397207"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4794c6c3fbe8f9ac87699b131a1f26e7b4abcf6d828da46a3a52648c7930eba"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e8456b6ee5527112ff2354dd9087b030e3429e43a74f480d4a5ca79d269fd85"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:beb880a9ca0a117415f241f66d56025c02037f7c4efc6fe59b5b8454f1eaa50d"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6897bebb118c44b38c9cb62a178e09f1593c949391b9a1a6fe777ccab5934ee7"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b553dd06e875249fd43efd727785efb57a53180e0fde321468222eabbeaafa"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:f0b2044fdddeea5b05df832e50d2a06fe61023acb44d76978e1b060206a8a476"}, + {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05cf1e74900e8da73fa08cc76c74a03345e5a3e37691d07cfe2092d7d8e27b04"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:efd489fec7c311dae25e94fe7eeda4b3d06be71c68f2cf2e8ef990ffcd2cd7e8"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ada7754a10faacd4f26067e62de52d6af93b6d9542f0df73c57b9771eb3ba9c4"}, + {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c2a34fd26588949e1e7977cfcbb17a9a42c948c100cab890c6d8d823f0586457"}, + {file = "rpds_py-0.28.0-cp310-cp310-win32.whl", hash = "sha256:f9174471d6920cbc5e82a7822de8dfd4dcea86eb828b04fc8c6519a77b0ee51e"}, + {file = "rpds_py-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:6e32dd207e2c4f8475257a3540ab8a93eff997abfa0a3fdb287cae0d6cd874b8"}, + {file = "rpds_py-0.28.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:03065002fd2e287725d95fbc69688e0c6daf6c6314ba38bdbaa3895418e09296"}, + {file = "rpds_py-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28ea02215f262b6d078daec0b45344c89e161eab9526b0d898221d96fdda5f27"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dbade8fbf30bcc551cb352376c0ad64b067e4fc56f90e22ba70c3ce205988c"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c03002f54cc855860bfdc3442928ffdca9081e73b5b382ed0b9e8efe6e5e205"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:b9699fa7990368b22032baf2b2dce1f634388e4ffc03dfefaaac79f4695edc95"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9b06fe1a75e05e0713f06ea0c89ecb6452210fd60e2f1b6ddc1067b990e08d9"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9f83e7b326a3f9ec3ef84cda98fb0a74c7159f33e692032233046e7fd15da2"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:0d3259ea9ad8743a75a43eb7819324cdab393263c91be86e2d1901ee65c314e0"}, + {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a7548b345f66f6695943b4ef6afe33ccd3f1b638bd9afd0f730dd255c249c9e"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9a40040aa388b037eb39416710fbcce9443498d2eaab0b9b45ae988b53f5c67"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f60c7ea34e78c199acd0d3cda37a99be2c861dd2b8cf67399784f70c9f8e57d"}, + {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1571ae4292649100d743b26d5f9c63503bb1fedf538a8f29a98dce2d5ba6b4e6"}, + {file = "rpds_py-0.28.0-cp311-cp311-win32.whl", hash = "sha256:5cfa9af45e7c1140af7321fa0bef25b386ee9faa8928c80dc3a5360971a29e8c"}, + {file = "rpds_py-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd8d86b5d29d1b74100982424ba53e56033dc47720a6de9ba0259cf81d7cecaa"}, + {file = "rpds_py-0.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e27d3a5709cc2b3e013bf93679a849213c79ae0573f9b894b284b55e729e120"}, + {file = "rpds_py-0.28.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6b4f28583a4f247ff60cd7bdda83db8c3f5b05a7a82ff20dd4b078571747708f"}, + {file = "rpds_py-0.28.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d678e91b610c29c4b3d52a2c148b641df2b4676ffe47c59f6388d58b99cdc424"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e819e0e37a44a78e1383bf1970076e2ccc4dc8c2bbaa2f9bd1dc987e9afff628"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5ee514e0f0523db5d3fb171f397c54875dbbd69760a414dccf9d4d7ad628b5bd"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3fa06d27fdcee47f07a39e02862da0100cb4982508f5ead53ec533cd5fe55e"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46959ef2e64f9e4a41fc89aa20dbca2b85531f9a72c21099a3360f35d10b0d5a"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8455933b4bcd6e83fde3fefc987a023389c4b13f9a58c8d23e4b3f6d13f78c84"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ad50614a02c8c2962feebe6012b52f9802deec4263946cddea37aaf28dd25a66"}, + {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5deca01b271492553fdb6c7fd974659dce736a15bae5dad7ab8b93555bceb28"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:735f8495a13159ce6a0d533f01e8674cec0c57038c920495f87dcb20b3ddb48a"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:961ca621ff10d198bbe6ba4957decca61aa2a0c56695384c1d6b79bf61436df5"}, + {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2374e16cc9131022e7d9a8f8d65d261d9ba55048c78f3b6e017971a4f5e6353c"}, + {file = "rpds_py-0.28.0-cp312-cp312-win32.whl", hash = "sha256:d15431e334fba488b081d47f30f091e5d03c18527c325386091f31718952fe08"}, + {file = "rpds_py-0.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:a410542d61fc54710f750d3764380b53bf09e8c4edbf2f9141a82aa774a04f7c"}, + {file = "rpds_py-0.28.0-cp312-cp312-win_arm64.whl", hash = "sha256:1f0cfd1c69e2d14f8c892b893997fa9a60d890a0c8a603e88dca4955f26d1edd"}, + {file = "rpds_py-0.28.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:e9e184408a0297086f880556b6168fa927d677716f83d3472ea333b42171ee3b"}, + {file = "rpds_py-0.28.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:edd267266a9b0448f33dc465a97cfc5d467594b600fe28e7fa2f36450e03053a"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85beb8b3f45e4e32f6802fb6cd6b17f615ef6c6a52f265371fb916fae02814aa"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d2412be8d00a1b895f8ad827cc2116455196e20ed994bb704bf138fe91a42724"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf128350d384b777da0e68796afdcebc2e9f63f0e9f242217754e647f6d32491"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2036d09b363aa36695d1cc1a97b36865597f4478470b0697b5ee9403f4fe399"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e1e9be4fa6305a16be628959188e4fd5cd6f1b0e724d63c6d8b2a8adf74ea6"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0a403460c9dd91a7f23fc3188de6d8977f1d9603a351d5db6cf20aaea95b538d"}, + {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7366b6553cdc805abcc512b849a519167db8f5e5c3472010cd1228b224265cb"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b43c6a3726efd50f18d8120ec0551241c38785b68952d240c45ea553912ac41"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0cb7203c7bc69d7c1585ebb33a2e6074492d2fc21ad28a7b9d40457ac2a51ab7"}, + {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7a52a5169c664dfb495882adc75c304ae1d50df552fbd68e100fdc719dee4ff9"}, + {file = "rpds_py-0.28.0-cp313-cp313-win32.whl", hash = "sha256:2e42456917b6687215b3e606ab46aa6bca040c77af7df9a08a6dcfe8a4d10ca5"}, + {file = 
"rpds_py-0.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:e0a0311caedc8069d68fc2bf4c9019b58a2d5ce3cd7cb656c845f1615b577e1e"}, + {file = "rpds_py-0.28.0-cp313-cp313-win_arm64.whl", hash = "sha256:04c1b207ab8b581108801528d59ad80aa83bb170b35b0ddffb29c20e411acdc1"}, + {file = "rpds_py-0.28.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f296ea3054e11fc58ad42e850e8b75c62d9a93a9f981ad04b2e5ae7d2186ff9c"}, + {file = "rpds_py-0.28.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5a7306c19b19005ad98468fcefeb7100b19c79fc23a5f24a12e06d91181193fa"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d9b86aa501fed9862a443c5c3116f6ead8bc9296185f369277c42542bd646b"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5bbc701eff140ba0e872691d573b3d5d30059ea26e5785acba9132d10c8c31d"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5690671cd672a45aa8616d7374fdf334a1b9c04a0cac3c854b1136e92374fe"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f1d92ecea4fa12f978a367c32a5375a1982834649cdb96539dcdc12e609ab1a"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d252db6b1a78d0a3928b6190156042d54c93660ce4d98290d7b16b5296fb7cc"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d61b355c3275acb825f8777d6c4505f42b5007e357af500939d4a35b19177259"}, + {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acbe5e8b1026c0c580d0321c8aae4b0a1e1676861d48d6e8c6586625055b606a"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8aa23b6f0fc59b85b4c7d89ba2965af274346f738e8d9fc2455763602e62fd5f"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:7b14b0c680286958817c22d76fcbca4800ddacef6f678f3a7c79a1fe7067fe37"}, + {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bcf1d210dfee61a6c86551d67ee1031899c0fdbae88b2d44a569995d43797712"}, + {file = "rpds_py-0.28.0-cp313-cp313t-win32.whl", hash = "sha256:3aa4dc0fdab4a7029ac63959a3ccf4ed605fee048ba67ce89ca3168da34a1342"}, + {file = "rpds_py-0.28.0-cp313-cp313t-win_amd64.whl", hash = "sha256:7b7d9d83c942855e4fdcfa75d4f96f6b9e272d42fffcb72cd4bb2577db2e2907"}, + {file = "rpds_py-0.28.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:dcdcb890b3ada98a03f9f2bb108489cdc7580176cb73b4f2d789e9a1dac1d472"}, + {file = "rpds_py-0.28.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f274f56a926ba2dc02976ca5b11c32855cbd5925534e57cfe1fda64e04d1add2"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe0438ac4a29a520ea94c8c7f1754cdd8feb1bc490dfda1bfd990072363d527"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a358a32dd3ae50e933347889b6af9a1bdf207ba5d1a3f34e1a38cd3540e6733"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e80848a71c78aa328fefaba9c244d588a342c8e03bda518447b624ea64d1ff56"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f586db2e209d54fe177e58e0bc4946bea5fb0102f150b1b2f13de03e1f0976f8"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae8ee156d6b586e4292491e885d41483136ab994e719a13458055bec14cf370"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:a805e9b3973f7e27f7cab63a6b4f61d90f2e5557cff73b6e97cd5b8540276d3d"}, + {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5d3fd16b6dc89c73a4da0b4ac8b12a7ecc75b2864b95c9e5afed8003cb50a728"}, + {file = 
"rpds_py-0.28.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6796079e5d24fdaba6d49bda28e2c47347e89834678f2bc2c1b4fc1489c0fb01"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:76500820c2af232435cbe215e3324c75b950a027134e044423f59f5b9a1ba515"}, + {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bbdc5640900a7dbf9dd707fe6388972f5bbd883633eb68b76591044cfe346f7e"}, + {file = "rpds_py-0.28.0-cp314-cp314-win32.whl", hash = "sha256:adc8aa88486857d2b35d75f0640b949759f79dc105f50aa2c27816b2e0dd749f"}, + {file = "rpds_py-0.28.0-cp314-cp314-win_amd64.whl", hash = "sha256:66e6fa8e075b58946e76a78e69e1a124a21d9a48a5b4766d15ba5b06869d1fa1"}, + {file = "rpds_py-0.28.0-cp314-cp314-win_arm64.whl", hash = "sha256:a6fe887c2c5c59413353b7c0caff25d0e566623501ccfff88957fa438a69377d"}, + {file = "rpds_py-0.28.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7a69df082db13c7070f7b8b1f155fa9e687f1d6aefb7b0e3f7231653b79a067b"}, + {file = "rpds_py-0.28.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b1cde22f2c30ebb049a9e74c5374994157b9b70a16147d332f89c99c5960737a"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5338742f6ba7a51012ea470bd4dc600a8c713c0c72adaa0977a1b1f4327d6592"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1460ebde1bcf6d496d80b191d854adedcc619f84ff17dc1c6d550f58c9efbba"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3eb248f2feba84c692579257a043a7699e28a77d86c77b032c1d9fbb3f0219c"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3bbba5def70b16cd1c1d7255666aad3b290fbf8d0fe7f9f91abafb73611a91"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3114f4db69ac5a1f32e7e4d1cbbe7c8f9cf8217f78e6e002cedf2d54c2a548ed"}, + {file 
= "rpds_py-0.28.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:4b0cb8a906b1a0196b863d460c0222fb8ad0f34041568da5620f9799b83ccf0b"}, + {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf681ac76a60b667106141e11a92a3330890257e6f559ca995fbb5265160b56e"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1e8ee6413cfc677ce8898d9cde18cc3a60fc2ba756b0dec5b71eb6eb21c49fa1"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b3072b16904d0b5572a15eb9d31c1954e0d3227a585fc1351aa9878729099d6c"}, + {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b670c30fd87a6aec281c3c9896d3bae4b205fd75d79d06dc87c2503717e46092"}, + {file = "rpds_py-0.28.0-cp314-cp314t-win32.whl", hash = "sha256:8014045a15b4d2b3476f0a287fcc93d4f823472d7d1308d47884ecac9e612be3"}, + {file = "rpds_py-0.28.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a4e59c90d9c27c561eb3160323634a9ff50b04e4f7820600a2beb0ac90db578"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f5e7101145427087e493b9c9b959da68d357c28c562792300dd21a095118ed16"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:31eb671150b9c62409a888850aaa8e6533635704fe2b78335f9aaf7ff81eec4d"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b55c1f64482f7d8bd39942f376bfdf2f6aec637ee8c805b5041e14eeb771db"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24743a7b372e9a76171f6b69c01aedf927e8ac3e16c474d9fe20d552a8cb45c7"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:389c29045ee8bbb1627ea190b4976a310a295559eaf9f1464a1a6f2bf84dde78"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:23690b5827e643150cf7b49569679ec13fe9a610a15949ed48b85eb7f98f34ec"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f0c9266c26580e7243ad0d72fc3e01d6b33866cfab5084a6da7576bcf1c4f72"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4c6c4db5d73d179746951486df97fd25e92396be07fc29ee8ff9a8f5afbdfb27"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3b695a8fa799dd2cfdb4804b37096c5f6dba1ac7f48a7fbf6d0485bcd060316"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6aa1bfce3f83baf00d9c5fcdbba93a3ab79958b4c7d7d1f55e7fe68c20e63912"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b0f9dceb221792b3ee6acb5438eb1f02b0cb2c247796a72b016dcc92c6de829"}, + {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5d0145edba8abd3db0ab22b5300c99dc152f5c9021fab861be0f0544dc3cbc5f"}, + {file = "rpds_py-0.28.0.tar.gz", hash = "sha256:abd4df20485a0983e2ca334a216249b6186d6e3c1627e106651943dbdb791aea"}, ] [[package]] name = "semantic-version" version = "2.10.0" description = "A library implementing the 'SemVer' scheme." 
-category = "main" optional = false python-versions = ">=2.7" +groups = ["main"] files = [ {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] [package.extras] -dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", "readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"] +dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1) ; python_version == \"3.4\"", "coverage", "flake8", "nose2", "readme-renderer (<25.0) ; python_version == \"3.4\"", "tox", "wheel", "zest.releaser[recommended]"] doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] name = "send2trash" version = "1.8.3" description = "Send file to trash natively under Mac OS X, Windows and Linux" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["dev"] files = [ {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, ] [package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] +nativelib = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\"", "pywin32 ; sys_platform == \"win32\""] +objc = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\""] +win32 = ["pywin32 ; sys_platform == \"win32\""] [[package]] name = "setuptools" -version = "75.8.0" +version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "setuptools-75.8.0-py3-none-any.whl", hash = 
"sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, - {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", 
"virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.14.0,<1.15.0)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -3829,9 +4591,9 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -3839,105 +4601,105 @@ files = [ [[package]] name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" +version = "3.0.1" +description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" +groups = ["dev"] files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, + {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, + {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, ] [[package]] name = "soupsieve" -version = "2.6" +version = "2.8" description = "A modern CSS selector implementation for Beautiful Soup." -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, + {file = "soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c"}, + {file = "soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f"}, ] [[package]] name = "sqlalchemy" -version = "2.0.38" +version = "2.0.44" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e1d9e429028ce04f187a9f522818386c8b076723cdbe9345708384f49ebcec6"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:b87a90f14c68c925817423b0424381f0e16d80fc9a1a1046ef202ab25b19a444"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:402c2316d95ed90d3d3c25ad0390afa52f4d2c56b348f212aa9c8d072a40eee5"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6493bc0eacdbb2c0f0d260d8988e943fee06089cd239bd7f3d0c45d1657a70e2"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0561832b04c6071bac3aad45b0d3bb6d2c4f46a8409f0a7a9c9fa6673b41bc03"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49aa2cdd1e88adb1617c672a09bf4ebf2f05c9448c6dbeba096a3aeeb9d4d443"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-win32.whl", hash = "sha256:64aa8934200e222f72fcfd82ee71c0130a9c07d5725af6fe6e919017d095b297"}, - {file = "SQLAlchemy-2.0.38-cp310-cp310-win_amd64.whl", hash = "sha256:c57b8e0841f3fce7b703530ed70c7c36269c6d180ea2e02e36b34cb7288c50c7"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bf89e0e4a30714b357f5d46b6f20e0099d38b30d45fa68ea48589faf5f12f62d"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8455aa60da49cb112df62b4721bd8ad3654a3a02b9452c783e651637a1f21fa2"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f53c0d6a859b2db58332e0e6a921582a02c1677cc93d4cbb36fdf49709b327b2"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c4817dff8cef5697f5afe5fec6bc1783994d55a68391be24cb7d80d2dbc3a6"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9cea5b756173bb86e2235f2f871b406a9b9d722417ae31e5391ccaef5348f2c"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40e9cdbd18c1f84631312b64993f7d755d85a3930252f6276a77432a2b25a2f3"}, - {file = 
"SQLAlchemy-2.0.38-cp311-cp311-win32.whl", hash = "sha256:cb39ed598aaf102251483f3e4675c5dd6b289c8142210ef76ba24aae0a8f8aba"}, - {file = "SQLAlchemy-2.0.38-cp311-cp311-win_amd64.whl", hash = "sha256:f9d57f1b3061b3e21476b0ad5f0397b112b94ace21d1f439f2db472e568178ae"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12d5b06a1f3aeccf295a5843c86835033797fea292c60e72b07bcb5d820e6dd3"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e036549ad14f2b414c725349cce0772ea34a7ab008e9cd67f9084e4f371d1f32"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3bee874cb1fadee2ff2b79fc9fc808aa638670f28b2145074538d4a6a5028e"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e185ea07a99ce8b8edfc788c586c538c4b1351007e614ceb708fd01b095ef33e"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b79ee64d01d05a5476d5cceb3c27b5535e6bb84ee0f872ba60d9a8cd4d0e6579"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afd776cf1ebfc7f9aa42a09cf19feadb40a26366802d86c1fba080d8e5e74bdd"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-win32.whl", hash = "sha256:a5645cd45f56895cfe3ca3459aed9ff2d3f9aaa29ff7edf557fa7a23515a3725"}, - {file = "SQLAlchemy-2.0.38-cp312-cp312-win_amd64.whl", hash = "sha256:1052723e6cd95312f6a6eff9a279fd41bbae67633415373fdac3c430eca3425d"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ecef029b69843b82048c5b347d8e6049356aa24ed644006c9a9d7098c3bd3bfd"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c8bcad7fc12f0cc5896d8e10fdf703c45bd487294a986903fe032c72201596b"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0ef3f98175d77180ffdc623d38e9f1736e8d86b6ba70bff182a7e68bed7727"}, - {file = 
"SQLAlchemy-2.0.38-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ac78898c50e2574e9f938d2e5caa8fe187d7a5b69b65faa1ea4648925b096"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9eb4fa13c8c7a2404b6a8e3772c17a55b1ba18bc711e25e4d6c0c9f5f541b02a"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dba1cdb8f319084f5b00d41207b2079822aa8d6a4667c0f369fce85e34b0c86"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-win32.whl", hash = "sha256:eae27ad7580529a427cfdd52c87abb2dfb15ce2b7a3e0fc29fbb63e2ed6f8120"}, - {file = "SQLAlchemy-2.0.38-cp313-cp313-win_amd64.whl", hash = "sha256:b335a7c958bc945e10c522c069cd6e5804f4ff20f9a744dd38e748eb602cbbda"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40310db77a55512a18827488e592965d3dec6a3f1e3d8af3f8243134029daca3"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d3043375dd5bbcb2282894cbb12e6c559654c67b5fffb462fda815a55bf93f7"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70065dfabf023b155a9c2a18f573e47e6ca709b9e8619b2e04c54d5bcf193178"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:c058b84c3b24812c859300f3b5abf300daa34df20d4d4f42e9652a4d1c48c8a4"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0398361acebb42975deb747a824b5188817d32b5c8f8aba767d51ad0cc7bb08d"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-win32.whl", hash = "sha256:a2bc4e49e8329f3283d99840c136ff2cd1a29e49b5624a46a290f04dff48e079"}, - {file = "SQLAlchemy-2.0.38-cp37-cp37m-win_amd64.whl", hash = "sha256:9cd136184dd5f58892f24001cdce986f5d7e96059d004118d5410671579834a4"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:665255e7aae5f38237b3a6eae49d2358d83a59f39ac21036413fab5d1e810578"}, - {file = 
"SQLAlchemy-2.0.38-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:92f99f2623ff16bd4aaf786ccde759c1f676d39c7bf2855eb0b540e1ac4530c8"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa498d1392216fae47eaf10c593e06c34476ced9549657fca713d0d1ba5f7248"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9afbc3909d0274d6ac8ec891e30210563b2c8bdd52ebbda14146354e7a69373"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:57dd41ba32430cbcc812041d4de8d2ca4651aeefad2626921ae2a23deb8cd6ff"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3e35d5565b35b66905b79ca4ae85840a8d40d31e0b3e2990f2e7692071b179ca"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-win32.whl", hash = "sha256:f0d3de936b192980209d7b5149e3c98977c3810d401482d05fb6d668d53c1c63"}, - {file = "SQLAlchemy-2.0.38-cp38-cp38-win_amd64.whl", hash = "sha256:3868acb639c136d98107c9096303d2d8e5da2880f7706f9f8c06a7f961961149"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07258341402a718f166618470cde0c34e4cec85a39767dce4e24f61ba5e667ea"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a826f21848632add58bef4f755a33d45105d25656a0c849f2dc2df1c71f6f50"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:386b7d136919bb66ced64d2228b92d66140de5fefb3c7df6bd79069a269a7b06"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f2951dc4b4f990a4b394d6b382accb33141d4d3bd3ef4e2b27287135d6bdd68"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8bf312ed8ac096d674c6aa9131b249093c1b37c35db6a967daa4c84746bc1bc9"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6db316d6e340f862ec059dc12e395d71f39746a20503b124edc255973977b728"}, - {file = 
"SQLAlchemy-2.0.38-cp39-cp39-win32.whl", hash = "sha256:c09a6ea87658695e527104cf857c70f79f14e9484605e205217aae0ec27b45fc"}, - {file = "SQLAlchemy-2.0.38-cp39-cp39-win_amd64.whl", hash = "sha256:12f5c9ed53334c3ce719155424dc5407aaa4f6cadeb09c5b627e06abb93933a1"}, - {file = "SQLAlchemy-2.0.38-py3-none-any.whl", hash = "sha256:63178c675d4c80def39f1febd625a6333f44c0ba269edd8a468b156394b27753"}, - {file = "sqlalchemy-2.0.38.tar.gz", hash = "sha256:e5a4d82bdb4bf1ac1285a68eab02d253ab73355d9f0fe725a97e1e0fa689decb"}, +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.44-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:471733aabb2e4848d609141a9e9d56a427c0a038f4abf65dd19d7a21fd563632"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48bf7d383a35e668b984c805470518b635d48b95a3c57cb03f37eaa3551b5f9f"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf4bb6b3d6228fcf3a71b50231199fb94d2dd2611b66d33be0578ea3e6c2726"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:e998cf7c29473bd077704cea3577d23123094311f59bdc4af551923b168332b1"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ebac3f0b5732014a126b43c2b7567f2f0e0afea7d9119a3378bde46d3dcad88e"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win32.whl", hash = "sha256:3255d821ee91bdf824795e936642bbf43a4c7cedf5d1aed8d24524e66843aa74"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win_amd64.whl", hash = "sha256:78e6c137ba35476adb5432103ae1534f2f5295605201d946a4198a0dea4b38e7"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c77f3080674fc529b1bd99489378c7f63fcb4ba7f8322b79732e0258f0ea3ce"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26ef74ba842d61635b0152763d057c8d48215d5be9bb8b7604116a059e9985"}, + {file = 
"sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a172b31785e2f00780eccab00bc240ccdbfdb8345f1e6063175b3ff12ad1b0"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9480c0740aabd8cb29c329b422fb65358049840b34aba0adf63162371d2a96e"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17835885016b9e4d0135720160db3095dc78c583e7b902b6be799fb21035e749"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cbe4f85f50c656d753890f39468fcd8190c5f08282caf19219f684225bfd5fd2"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win32.whl", hash = "sha256:2fcc4901a86ed81dc76703f3b93ff881e08761c63263c46991081fd7f034b165"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win_amd64.whl", hash = "sha256:9919e77403a483ab81e3423151e8ffc9dd992c20d2603bf17e4a8161111e55f5"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = 
"sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976"}, + {file = 
"sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fc44e5965ea46909a416fff0af48a219faefd5773ab79e5f8a5fcd5d62b2667"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dc8b3850d2a601ca2320d081874033684e246d28e1c5e89db0864077cfc8f5a9"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d733dec0614bb8f4bcb7c8af88172b974f685a31dc3a65cca0527e3120de5606"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22be14009339b8bc16d6b9dc8780bacaba3402aa7581658e246114abbd2236e3"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:357bade0e46064f88f2c3a99808233e67b0051cdddf82992379559322dfeb183"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4848395d932e93c1595e59a8672aa7400e8922c39bb9b0668ed99ac6fa867822"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win32.whl", hash = "sha256:2f19644f27c76f07e10603580a47278abb2a70311136a7f8fd27dc2e096b9013"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win_amd64.whl", hash = "sha256:1df4763760d1de0dfc8192cc96d8aa293eb1a44f8f7a5fbe74caf1b551905c5e"}, + {file = 
"sqlalchemy-2.0.44-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7027414f2b88992877573ab780c19ecb54d3a536bef3397933573d6b5068be4"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fe166c7d00912e8c10d3a9a0ce105569a31a3d0db1a6e82c4e0f4bf16d5eca9"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3caef1ff89b1caefc28f0368b3bde21a7e3e630c2eddac16abd9e47bd27cc36a"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc2856d24afa44295735e72f3c75d6ee7fdd4336d8d3a8f3d44de7aa6b766df2"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:11bac86b0deada30b6b5f93382712ff0e911fe8d31cb9bf46e6b149ae175eff0"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d18cd0e9a0f37c9f4088e50e3839fcb69a380a0ec957408e0b57cff08ee0a26"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win32.whl", hash = "sha256:9e9018544ab07614d591a26c1bd4293ddf40752cc435caf69196740516af7100"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win_amd64.whl", hash = "sha256:8e0e4e66fd80f277a8c3de016a81a554e76ccf6b8d881ee0b53200305a8433f6"}, + {file = "sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05"}, + {file = "sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = ">=1", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or 
platform_machine == \"win32\" or platform_machine == \"WIN32\""} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -3948,7 +4710,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -3961,9 +4723,9 @@ sqlcipher = ["sqlcipher3_binary"] name = "sqlalchemy-json" version = "0.7.0" description = "JSON type with nested change tracking for SQLAlchemy" -category = "main" optional = false python-versions = ">= 3.6" +groups = ["main"] files = [ {file = "sqlalchemy-json-0.7.0.tar.gz", hash = "sha256:620d0b26f648f21a8fa9127df66f55f83a5ab4ae010e5397a5c6989a08238561"}, {file = "sqlalchemy_json-0.7.0-py3-none-any.whl", hash = "sha256:27881d662ca18363a4ac28175cc47ea2a6f2bef997ae1159c151026b741818e6"}, @@ -3979,9 +4741,9 @@ dev = ["pytest"] name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = 
"stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -3999,9 +4761,9 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "terminado" version = "0.18.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, @@ -4021,9 +4783,9 @@ typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] name = "tinycss2" version = "1.4.0" description = "A tiny CSS parser" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, @@ -4038,86 +4800,98 @@ test = ["pytest", "ruff"] [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = 
"tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = 
"sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] +groups = ["main", "dev"] +files = [ + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + 
{file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = 
"sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = 
"tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, +] +markers = {main = "python_version == \"3.10\""} [[package]] name = "tomlkit" -version = "0.13.2" +version = "0.13.3" description = "Style preserving TOML library" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, + {file = "tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0"}, + {file = "tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1"}, ] [[package]] name = "tornado" -version = "6.4.2" +version = "6.5.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
-category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, - {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, - {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, - {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, + {file = "tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6"}, + {file = 
"tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef"}, + {file = "tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e"}, + {file = "tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882"}, + {file = "tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108"}, + {file = "tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c"}, + {file = "tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4"}, + {file = "tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04"}, + {file = "tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0"}, + {file = "tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f"}, + {file = "tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af"}, + {file = "tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0"}, ] [[package]] name = "traitlets" version = "5.14.3" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = 
"traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -4129,23 +4903,23 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-python-dateutil" -version = "2.9.0.20241206" +version = "2.9.0.20251108" description = "Typing stubs for python-dateutil" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, - {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, + {file = "types_python_dateutil-2.9.0.20251108-py3-none-any.whl", hash = "sha256:a4a537f0ea7126f8ccc2763eec9aa31ac8609e3c8e530eb2ddc5ee234b3cd764"}, + {file = "types_python_dateutil-2.9.0.20251108.tar.gz", hash = "sha256:d8a6687e197f2fa71779ce36176c666841f811368710ab8d274b876424ebfcaa"}, ] [[package]] name = "types-requests" version = "2.31.0.6" description = "Typing stubs for requests" -category = "main" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, @@ -4158,9 +4932,9 @@ types-urllib3 = "*" name = "types-urllib3" version = "1.26.25.14" description = "Typing stubs for urllib3" -category = "main" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ 
-4170,9 +4944,9 @@ files = [ name = "types-waitress" version = "2.1.4.20240421" description = "Typing stubs for waitress" -category = "dev" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-waitress-2.1.4.20240421.tar.gz", hash = "sha256:3f961b452865979ba6a09dd3ea79bcce1cfee685a01aad03766c4f9d564651c6"}, {file = "types_waitress-2.1.4.20240421-py3-none-any.whl", hash = "sha256:0c2d39265e096add609f4d8085f1bf1721e0a91a602a1f0a9187f3f8f3a2a328"}, @@ -4180,35 +4954,35 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] [[package]] name = "tzdata" -version = "2025.1" +version = "2025.2" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" +groups = ["dev"] files = [ - {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, - {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = 
"tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] name = "uri-template" version = "1.3.0" description = "RFC 6570 URI Template Processor" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, @@ -4221,26 +4995,26 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake name = "urllib3" version = "1.26.20" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks 
(>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." -category = "main" optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "validators-0.20.0.tar.gz", hash = "sha256:24148ce4e64100a2d5e267233e23e7afeb55316b47d30faae7eb6e7292bc226a"}, ] @@ -4255,9 +5029,9 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "waitress" version = "2.1.2" description = "Waitress WSGI server" -category = "main" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "waitress-2.1.2-py3-none-any.whl", hash = "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a"}, {file = "waitress-2.1.2.tar.gz", hash = "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba"}, @@ -4271,9 +5045,9 @@ testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] name = "watchdog" version = "6.0.0" description = "Filesystem events monitoring" -category = "dev" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, @@ -4312,35 +5086,35 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.13" +version = "0.2.14" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" +groups = ["dev"] files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, + {file = "wcwidth-0.2.14-py2.py3-none-any.whl", hash = 
"sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1"}, + {file = "wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605"}, ] [[package]] name = "webcolors" -version = "24.11.1" +version = "25.10.0" description = "A library for working with the color formats defined by HTML and CSS." -category = "dev" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" +groups = ["dev"] files = [ - {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, - {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, + {file = "webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d"}, + {file = "webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf"}, ] [[package]] name = "webencodings" version = "0.5.1" description = "Character encoding aliases for legacy web content" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -4348,28 +5122,28 @@ files = [ [[package]] name = "websocket-client" -version = "1.8.0" +version = "1.9.0" description = "WebSocket client for Python with low level API options" -category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, + 
{file = "websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"}, + {file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"}, ] [package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx_rtd_theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] -test = ["websockets"] +test = ["pytest", "websockets"] [[package]] name = "werkzeug" version = "3.1.3" description = "The comprehensive WSGI web application library." -category = "main" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -4383,126 +5157,273 @@ watchdog = ["watchdog (>=2.3)"] [[package]] name = "widgetsnbextension" -version = "4.0.13" +version = "4.0.15" description = "Jupyter interactive widgets for Jupyter Notebook" -category = "dev" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ - {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, - {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, + {file = "widgetsnbextension-4.0.15-py3-none-any.whl", hash = "sha256:8156704e4346a571d9ce73b84bee86a29906c9abfd7223b7228a28899ccf3366"}, + {file = "widgetsnbextension-4.0.15.tar.gz", hash = "sha256:de8610639996f1567952d763a5a41af8af37f2575a41f9852a38f947eb82a3b9"}, ] [[package]] name = "wrapt" -version = "1.17.2" +version = "1.17.3" description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" optional = false python-versions = ">=3.8" -files = [ - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, - {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, - {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, - {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, - {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, - {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, - {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, - {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, - {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, - {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, - {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, - {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, - {file = 
"wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, - {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, - {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, - {file = "wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, - {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, - {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, - {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, - {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, - {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, - {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, - {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, - {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, - {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, - {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = "sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, - {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, - {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, - {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = 
"sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, - {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, - {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, - {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, - {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, - {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, - {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, - {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = 
"sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, - {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = "sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, - {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, - {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, +groups = ["dev"] +files = [ + {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}, + {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}, + {file = "wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c"}, + {file = "wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775"}, + {file = "wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd"}, + {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05"}, + {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418"}, + {file = "wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390"}, + {file = "wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6"}, + {file = "wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18"}, + {file = 
"wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f"}, + {file = "wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311"}, + {file = "wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1"}, + {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5"}, + {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2"}, + {file = "wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89"}, + {file = "wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77"}, + {file = "wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd"}, + {file = 
"wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828"}, + {file = "wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9"}, + {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396"}, + {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc"}, + {file = "wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe"}, + {file = "wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"}, + {file = "wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7"}, + {file = "wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277"}, + {file = "wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d"}, + {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa"}, + {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050"}, + {file = "wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8"}, + {file = "wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb"}, + {file = "wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c"}, + {file = "wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b"}, + {file = "wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa"}, + {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7"}, + {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4"}, + {file = "wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10"}, + {file = "wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6"}, + 
{file = "wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454"}, + {file = "wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e"}, + {file = "wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f"}, + {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056"}, + {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804"}, + {file = "wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977"}, + {file = "wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116"}, + {file = "wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:70d86fa5197b8947a2fa70260b48e400bf2ccacdcab97bb7de47e3d1e6312225"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df7d30371a2accfe4013e90445f6388c570f103d61019b6b7c57e0265250072a"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_11_0_arm64.whl", 
hash = "sha256:caea3e9c79d5f0d2c6d9ab96111601797ea5da8e6d0723f77eabb0d4068d2b2f"}, + {file = "wrapt-1.17.3-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:758895b01d546812d1f42204bd443b8c433c44d090248bf22689df673ccafe00"}, + {file = "wrapt-1.17.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02b551d101f31694fc785e58e0720ef7d9a10c4e62c1c9358ce6f63f23e30a56"}, + {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:656873859b3b50eeebe6db8b1455e99d90c26ab058db8e427046dbc35c3140a5"}, + {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a9a2203361a6e6404f80b99234fe7fb37d1fc73487b5a78dc1aa5b97201e0f22"}, + {file = "wrapt-1.17.3-cp38-cp38-win32.whl", hash = "sha256:55cbbc356c2842f39bcc553cf695932e8b30e30e797f961860afb308e6b1bb7c"}, + {file = "wrapt-1.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:ad85e269fe54d506b240d2d7b9f5f2057c2aa9a2ea5b32c66f8902f768117ed2"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:30ce38e66630599e1193798285706903110d4f057aab3168a34b7fdc85569afc"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65d1d00fbfb3ea5f20add88bbc0f815150dbbde3b026e6c24759466c8b5a9ef9"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7c06742645f914f26c7f1fa47b8bc4c91d222f76ee20116c43d5ef0912bba2d"}, + {file = "wrapt-1.17.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7e18f01b0c3e4a07fe6dfdb00e29049ba17eadbc5e7609a2a3a4af83ab7d710a"}, + {file = "wrapt-1.17.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f5f51a6466667a5a356e6381d362d259125b57f059103dd9fdc8c0cf1d14139"}, + {file = "wrapt-1.17.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:59923aa12d0157f6b82d686c3fd8e1166fa8cdfb3e17b42ce3b6147ff81528df"}, + {file = 
"wrapt-1.17.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46acc57b331e0b3bcb3e1ca3b421d65637915cfcd65eb783cb2f78a511193f9b"}, + {file = "wrapt-1.17.3-cp39-cp39-win32.whl", hash = "sha256:3e62d15d3cfa26e3d0788094de7b64efa75f3a53875cdbccdf78547aed547a81"}, + {file = "wrapt-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:1f23fa283f51c890eda8e34e4937079114c74b4c81d2b2f1f1d94948f5cc3d7f"}, + {file = "wrapt-1.17.3-cp39-cp39-win_arm64.whl", hash = "sha256:24c2ed34dc222ed754247a2702b1e1e89fdbaa4016f324b4b8f1a802d4ffe87f"}, + {file = "wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"}, + {file = "wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0"}, +] + +[[package]] +name = "yarl" +version = "1.22.0" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e"}, + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f"}, + {file = "yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147"}, + {file = 
"yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467"}, + {file = "yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea"}, + {file = "yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca"}, + {file = "yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028"}, + {file = 
"yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e"}, + {file = "yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca"}, + {file = "yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b"}, + {file = "yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520"}, + {file = "yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8"}, + {file = "yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c"}, + {file = "yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67"}, + {file = "yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95"}, + {file = "yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d"}, + {file = "yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62"}, + {file = "yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03"}, + {file = "yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249"}, + {file = "yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da"}, + {file = "yarl-1.22.0-cp314-cp314-win32.whl", hash = 
"sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2"}, + {file = "yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79"}, + {file = "yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = 
"sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c"}, + {file = "yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e"}, + {file = "yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27"}, + {file = "yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60"}, + {file = 
"yarl-1.22.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8"}, + {file = "yarl-1.22.0-cp39-cp39-win32.whl", hash = "sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b"}, + {file = "yarl-1.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed"}, + {file = "yarl-1.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2"}, + {file = "yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff"}, + {file = "yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71"}, ] +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + [[package]] name = "zipp" -version = "3.21.0" +version = "3.23.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.9" 
+groups = ["dev"] files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.10" -content-hash = "cf4657c4a05782f8af6e317d9a2c13b8a58ddfb461dbcd85b15c7b202f9c52a7" +content-hash = "ea530ff343914cbd5b4bdb672bbafd114d85a99e2901c58a57820f13599520b8" diff --git a/pyproject.toml b/pyproject.toml index f4618493..edad6ae5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ readme = "README.md" homepage = "https://pypi.org/project/pyfairdatatools" documentation = "https://pyfairdatatools.readthedocs.io" repository = "https://github.com/AI-READI/pyfairdatatools" - +package-mode = false keywords = [] classifiers = [ @@ -78,12 +78,12 @@ growthbook = "^1.0.0" pyfairdatatools = "0.1.3" # Dashboard ETL -pandas = "^2.2.0" numpy = "^1.26.4" pycap = "^2.6.0" azure-storage-blob = "^12.19.1" 
azure-communication-email = "^1.0.0" azure-storage-file-datalake = "^12.20.0" +polars = "1.35.2" [tool.poetry.group.dev.dependencies] @@ -201,4 +201,4 @@ markers = [] [build-system] requires = ["poetry-core>=1.3.2"] -build-backend = "poetry.core.masonry.api" \ No newline at end of file +build-backend = "poetry.core.masonry.api" From a8f75cce198900bcb2abb032acac28563911a7c9 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 17:50:18 -0800 Subject: [PATCH 496/505] fix: package-mode key removed --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index edad6ae5..e8cd533f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,6 @@ readme = "README.md" homepage = "https://pypi.org/project/pyfairdatatools" documentation = "https://pyfairdatatools.readthedocs.io" repository = "https://github.com/AI-READI/pyfairdatatools" -package-mode = false keywords = [] classifiers = [ From 92c56ed9a46cc857cbe7a77ba0840dd43602e720 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 17:59:53 -0800 Subject: [PATCH 497/505] fix: Polars compilation issue --- Dockerfile | 1 + pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 7e50e6a0..e53c958b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,6 +4,7 @@ EXPOSE 5000 WORKDIR /app +ENV POLARS_REQUIRE_BIN=1 ENV POETRY_VERSION=1.3.2 RUN apk update diff --git a/pyproject.toml b/pyproject.toml index e8cd533f..f9cc2491 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ pycap = "^2.6.0" azure-storage-blob = "^12.19.1" azure-communication-email = "^1.0.0" azure-storage-file-datalake = "^12.20.0" -polars = "1.35.2" +polars = { version = "^1.35", extras = ["lazy"] } [tool.poetry.group.dev.dependencies] From 47b236494dfa4bb11ec3e435e91261327cf7be70 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 18:05:58 -0800 Subject: [PATCH 498/505] fix: attempt rust compilation step --- Dockerfile 
| 5 ++++- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index e53c958b..3733108b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,12 +4,15 @@ EXPOSE 5000 WORKDIR /app -ENV POLARS_REQUIRE_BIN=1 ENV POETRY_VERSION=1.3.2 RUN apk update RUN apk add --no-cache gcc libffi-dev musl-dev postgresql-dev +# Install Rust +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +ENV PATH="/root/.cargo/bin:${PATH}" + RUN pip install "poetry==$POETRY_VERSION" COPY poetry.lock pyproject.toml ./ diff --git a/pyproject.toml b/pyproject.toml index f9cc2491..e8cd533f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ pycap = "^2.6.0" azure-storage-blob = "^12.19.1" azure-communication-email = "^1.0.0" azure-storage-file-datalake = "^12.20.0" -polars = { version = "^1.35", extras = ["lazy"] } +polars = "1.35.2" [tool.poetry.group.dev.dependencies] From d665fbb70479660ddc5be04f5a399ea30b04601b Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 18:27:02 -0800 Subject: [PATCH 499/505] fix: Try moving from Alpine to Debian for Polars installation --- Dockerfile | 38 +++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/Dockerfile b/Dockerfile index 3733108b..bb5d4dbd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,38 +1,46 @@ -FROM python:3.10-alpine +# Use official slim Python image +FROM python:3.10-slim +# Expose the port your app will run on EXPOSE 5000 +# Set working directory WORKDIR /app +# Environment ENV POETRY_VERSION=1.3.2 +ENV PIP_ONLY_BINARY=:all: # Force pip to use pre-built wheels -RUN apk update -RUN apk add --no-cache gcc libffi-dev musl-dev postgresql-dev - -# Install Rust -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -ENV PATH="/root/.cargo/bin:${PATH}" +# Install system dependencies for building Python packages +RUN apt-get update && apt-get install -y --no-install-recommends \ + gcc \ + build-essential 
\ + libffi-dev \ + libpq-dev \ + && rm -rf /var/lib/apt/lists/* +# Install Poetry RUN pip install "poetry==$POETRY_VERSION" -COPY poetry.lock pyproject.toml ./ - +# Install Python dependencies +COPY pyproject.toml poetry.lock ./ RUN poetry config virtualenvs.create false RUN poetry install --no-root +# Copy source code and config COPY apis ./apis COPY model ./model COPY core ./core COPY modules ./modules -COPY app.py . -COPY config.py . -COPY caching.py . - +COPY app.py config.py caching.py ./ +# Copy database/migration setup COPY alembic ./alembic COPY alembic.ini . - +# Copy runtime entrypoint COPY entrypoint.sh . +COPY alembic alembic.ini ./ +COPY entrypoint.sh ./ +# Setup Entrypoint RUN chmod +x entrypoint.sh - ENTRYPOINT ["./entrypoint.sh"] From d3ab6eb628bb06ed8890341f26442c53a05520d7 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 18:31:00 -0800 Subject: [PATCH 500/505] fix: Minor fix --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index bb5d4dbd..1488a7ff 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,7 +9,8 @@ WORKDIR /app # Environment ENV POETRY_VERSION=1.3.2 -ENV PIP_ONLY_BINARY=:all: # Force pip to use pre-built wheels +# Note: Force pip to use pre-built wheels +ENV PIP_ONLY_BINARY=:all: # Install system dependencies for building Python packages RUN apt-get update && apt-get install -y --no-install-recommends \ From bf093012e33aa1b1869185d90d2061c7a65ba072 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 19:26:56 -0800 Subject: [PATCH 501/505] fix: OS issue with Polars and CSV handling --- modules/etl/transforms/redcap_live_transform.py | 3 ++- modules/etl/transforms/redcap_release_transform.py | 10 ++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/modules/etl/transforms/redcap_live_transform.py b/modules/etl/transforms/redcap_live_transform.py index 0931f76b..d01a3f3d 100644 --- a/modules/etl/transforms/redcap_live_transform.py +++ 
b/modules/etl/transforms/redcap_live_transform.py @@ -73,7 +73,7 @@ def __init__(self, config: dict): # Configure Logging logging.basicConfig(**self.logging_config) - self.logger = logging.getLogger("RedcapTransform") + self.logger = logging.getLogger("RedcapTransform:Live") # # REDCap Parsing Variables @@ -125,6 +125,7 @@ def __init__(self, config: dict): self.project: Any = None self.reports: Dict[str, Any] = {} + self.merged: pl.DataFrame = pl.DataFrame([]) def run(self): """ diff --git a/modules/etl/transforms/redcap_release_transform.py b/modules/etl/transforms/redcap_release_transform.py index f18976a9..8ab5f9ce 100644 --- a/modules/etl/transforms/redcap_release_transform.py +++ b/modules/etl/transforms/redcap_release_transform.py @@ -73,7 +73,7 @@ def __init__(self, config: dict): # Configure Logging logging.basicConfig(**self.logging_config) - self.logger = logging.getLogger("RedcapTransform") + self.logger = logging.getLogger("RedcapTransform:Release") # # REDCap Parsing Variables @@ -125,6 +125,7 @@ def __init__(self, config: dict): self.project: Any = None self.reports: Dict[str, Any] = {} + self.merged: pl.DataFrame = pl.DataFrame([]) def run (self): @@ -226,13 +227,14 @@ def get_stored_report(self, connection_string: str, container_name: str, blob_pa if not report_buffer: df = pl.DataFrame([]) else: - # Calculate schema to force Utf8 to prevent type inference issues on ragged data + # Load DataFrame df = pl.read_csv( report_buffer, separator=",", - infer_schema_length=0, # Helps with performance/consistency - schema_overrides={'*': pl.Utf8} + infer_schema_length=0 ) + # Convert all columns to Utf8 + df = df.select([pl.col(c).cast(pl.Utf8) for c in df.columns]) self.logger.info(f"Successfully loaded report into Polars DataFrame with shape {df.shape}") From 7814b6812e95fcd370a5ea15b4b42ff9a3700463 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 19:44:03 -0800 Subject: [PATCH 502/505] fix: Try different CSV separator --- 
modules/etl/transforms/redcap_release_transform.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/etl/transforms/redcap_release_transform.py b/modules/etl/transforms/redcap_release_transform.py index 8ab5f9ce..d69ae905 100644 --- a/modules/etl/transforms/redcap_release_transform.py +++ b/modules/etl/transforms/redcap_release_transform.py @@ -120,7 +120,7 @@ def __init__(self, config: dict): "raw_or_label": "raw", "raw_or_label_headers": "raw", "export_checkbox_labels": False, - "csv_delimiter": ",", + "csv_delimiter": "|", } self.project: Any = None @@ -230,7 +230,7 @@ def get_stored_report(self, connection_string: str, container_name: str, blob_pa # Load DataFrame df = pl.read_csv( report_buffer, - separator=",", + separator="|", infer_schema_length=0 ) # Convert all columns to Utf8 From e37858b4db0986d65716ee911586e25c14a6ab92 Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 20:15:39 -0800 Subject: [PATCH 503/505] fix: Move to live transform --- apis/dashboard.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 8a5b5ef8..54e0670b 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -482,6 +482,13 @@ def get(self, study_id: str, dashboard_id: str): redcap_project_dashboard, live=True, ) + # transformConfig = redcapReleaseTransformConfig + # redcap_project_dashboard = execute_transform( + # transformConfig, + # redcap_project_view, + # redcap_project_dashboard, + # live=False, + # ) # Create Dashboard Redis Cache caching.cache.set( @@ -686,12 +693,12 @@ def get(self, study_id: str): # # Finalize ETL Config - transformConfig = redcapReleaseTransformConfig + transformConfig = redcapLiveTransformConfig redcap_project_dashboard = execute_transform( transformConfig, redcap_project_view, redcap_project_dashboard, - live=False, + live=True, ) # Create Dashboard Redis Cache From 2f4172c9bae341981d2013419ebaaba5b4988b2a Mon Sep 17 00:00:00 2001 From: 
Greenstick Date: Fri, 14 Nov 2025 20:20:24 -0800 Subject: [PATCH 504/505] fix: Move to live transform --- apis/dashboard.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/apis/dashboard.py b/apis/dashboard.py index 54e0670b..7b322867 100644 --- a/apis/dashboard.py +++ b/apis/dashboard.py @@ -9,10 +9,9 @@ import caching import model from modules.etl import ModuleTransform, RedcapLiveTransform, RedcapReleaseTransform -from modules.etl.config import ( +from modules.etl.config import ( # redcapReleaseTransformConfig, moduleTransformConfigs, redcapLiveTransformConfig, - redcapReleaseTransformConfig, ) from .authentication import is_granted @@ -653,8 +652,6 @@ def get(self, study_id: str): """Get REDCap project dashboard""" model.db.session.flush() study = model.db.session.query(model.Study).get(study_id) - # if not is_granted("view", study): - # return "Access denied, you can not view this dashboard", 403 # Get Dashboard redcap_project_dashboards_query = model.StudyDashboard.query.filter_by( From 003d8d83bc4e1711bb21b76cb479d26d4c48ddeb Mon Sep 17 00:00:00 2001 From: Greenstick Date: Fri, 14 Nov 2025 20:36:11 -0800 Subject: [PATCH 505/505] feat: Additional logging on live transform --- modules/etl/transforms/redcap_live_transform.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/etl/transforms/redcap_live_transform.py b/modules/etl/transforms/redcap_live_transform.py index d01a3f3d..be0cf6ea 100644 --- a/modules/etl/transforms/redcap_live_transform.py +++ b/modules/etl/transforms/redcap_live_transform.py @@ -142,6 +142,7 @@ def run(self): # Load REDCap Project Metadata self.metadata: Any = self.project.export_metadata() + self.logger.info(f"API Request to metadata received metadata with length: {len(self.metadata)}") self.logger.info(f"Retrieving Live REDCap reports") for report_config in self.reports_configs: @@ -152,6 +153,7 @@ def run(self): # PyCap returns a list of dicts by default. 
report_data: Any = self.project.export_report(**report_kwdargs) + self.logger.info(f"API Request to report {report_key} received data with length: {len(report_data)}") # Convert to Polars ensuring all columns are input as UTF8 Strings if not report_data: